blob: 2ffc398287b5111d7ad6128e331c8d52c861bd03 [file] [log] [blame]
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86_64.h"
18
Mathieu Chartiere401d142015-04-22 13:56:20 -070019#include "art_method.h"
Guillaume Sanchez0f88e872015-03-30 17:55:45 +010020#include "code_generator_utils.h"
Vladimir Marko58155012015-08-19 12:49:41 +000021#include "compiled_method.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010022#include "entrypoints/quick/quick_entrypoints.h"
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +010023#include "gc/accounting/card_table.h"
Andreas Gampe71fb52f2014-12-29 17:43:08 -080024#include "intrinsics.h"
25#include "intrinsics_x86_64.h"
Ian Rogers7e70b002014-10-08 11:47:24 -070026#include "mirror/array-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070027#include "mirror/class-inl.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010028#include "mirror/object_reference.h"
29#include "thread.h"
30#include "utils/assembler.h"
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010031#include "utils/stack_checks.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010032#include "utils/x86_64/assembler_x86_64.h"
33#include "utils/x86_64/managed_register_x86_64.h"
34
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010035namespace art {
36
Roland Levillain0d5a2812015-11-13 10:07:31 +000037template<class MirrorType>
38class GcRoot;
39
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010040namespace x86_64 {
41
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010042static constexpr int kCurrentMethodStackOffset = 0;
Nicolas Geoffray76b1e172015-05-27 17:18:33 +010043static constexpr Register kMethodRegisterArgument = RDI;
// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
// table version generates 7 instructions and num_entries literals. The compare/jump sequence
// generates less code/data with a small num_entries.
47static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010048
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +000049static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +000050static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +010051
Mark Mendell24f2dfa2015-01-14 19:51:45 -050052static constexpr int kC2ConditionMask = 0x400;
53
Roland Levillain7cbd27f2016-08-11 23:53:33 +010054// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
55#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -070056#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64PointerSize, x).Int32Value()
Nicolas Geoffraye5038322014-07-04 09:41:32 +010057
Andreas Gampe85b62f22015-09-09 13:15:38 -070058class NullCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffraye5038322014-07-04 09:41:32 +010059 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000060 explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye5038322014-07-04 09:41:32 +010061
Alexandre Rames2ed20af2015-03-06 13:55:35 +000062 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +000063 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffraye5038322014-07-04 09:41:32 +010064 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +000065 if (instruction_->CanThrowIntoCatchBlock()) {
66 // Live registers will be restored in the catch block if caught.
67 SaveLiveRegisters(codegen, instruction_->GetLocations());
68 }
Serban Constantinescuba45db02016-07-12 22:53:02 +010069 x86_64_codegen->InvokeRuntime(kQuickThrowNullPointer,
Roland Levillain0d5a2812015-11-13 10:07:31 +000070 instruction_,
71 instruction_->GetDexPc(),
72 this);
Roland Levillain888d0672015-11-23 18:53:50 +000073 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
Nicolas Geoffraye5038322014-07-04 09:41:32 +010074 }
75
Alexandre Rames8158f282015-08-07 10:26:17 +010076 bool IsFatal() const OVERRIDE { return true; }
77
Alexandre Rames9931f312015-06-19 14:47:01 +010078 const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86_64"; }
79
Nicolas Geoffraye5038322014-07-04 09:41:32 +010080 private:
Nicolas Geoffraye5038322014-07-04 09:41:32 +010081 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
82};
83
Andreas Gampe85b62f22015-09-09 13:15:38 -070084class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
Calin Juravled0d48522014-11-04 16:40:20 +000085 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000086 explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}
Calin Juravled0d48522014-11-04 16:40:20 +000087
Alexandre Rames2ed20af2015-03-06 13:55:35 +000088 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +000089 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Calin Juravled0d48522014-11-04 16:40:20 +000090 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +010091 x86_64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +000092 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
Calin Juravled0d48522014-11-04 16:40:20 +000093 }
94
Alexandre Rames8158f282015-08-07 10:26:17 +010095 bool IsFatal() const OVERRIDE { return true; }
96
Alexandre Rames9931f312015-06-19 14:47:01 +010097 const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86_64"; }
98
Calin Juravled0d48522014-11-04 16:40:20 +000099 private:
Calin Juravled0d48522014-11-04 16:40:20 +0000100 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
101};
102
Andreas Gampe85b62f22015-09-09 13:15:38 -0700103class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
Calin Juravled0d48522014-11-04 16:40:20 +0000104 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000105 DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, Primitive::Type type, bool is_div)
106 : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}
Calin Juravled0d48522014-11-04 16:40:20 +0000107
Alexandre Rames2ed20af2015-03-06 13:55:35 +0000108 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Calin Juravled0d48522014-11-04 16:40:20 +0000109 __ Bind(GetEntryLabel());
Calin Juravled6fb6cf2014-11-11 19:07:44 +0000110 if (type_ == Primitive::kPrimInt) {
Calin Juravlebacfec32014-11-14 15:54:36 +0000111 if (is_div_) {
112 __ negl(cpu_reg_);
113 } else {
Mark Mendellcfa410b2015-05-25 16:02:44 -0400114 __ xorl(cpu_reg_, cpu_reg_);
Calin Juravlebacfec32014-11-14 15:54:36 +0000115 }
116
Calin Juravled6fb6cf2014-11-11 19:07:44 +0000117 } else {
118 DCHECK_EQ(Primitive::kPrimLong, type_);
Calin Juravlebacfec32014-11-14 15:54:36 +0000119 if (is_div_) {
120 __ negq(cpu_reg_);
121 } else {
Mark Mendell92e83bf2015-05-07 11:25:03 -0400122 __ xorl(cpu_reg_, cpu_reg_);
Calin Juravlebacfec32014-11-14 15:54:36 +0000123 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +0000124 }
Calin Juravled0d48522014-11-04 16:40:20 +0000125 __ jmp(GetExitLabel());
126 }
127
Alexandre Rames9931f312015-06-19 14:47:01 +0100128 const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86_64"; }
129
Calin Juravled0d48522014-11-04 16:40:20 +0000130 private:
Calin Juravlebacfec32014-11-14 15:54:36 +0000131 const CpuRegister cpu_reg_;
Calin Juravled6fb6cf2014-11-11 19:07:44 +0000132 const Primitive::Type type_;
Calin Juravlebacfec32014-11-14 15:54:36 +0000133 const bool is_div_;
134 DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
Calin Juravled0d48522014-11-04 16:40:20 +0000135};
136
Andreas Gampe85b62f22015-09-09 13:15:38 -0700137class SuspendCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000138 public:
Roland Levillain3887c462015-08-12 18:15:42 +0100139 SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
David Srbecky9cd6d372016-02-09 15:24:47 +0000140 : SlowPathCode(instruction), successor_(successor) {}
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000141
Alexandre Rames2ed20af2015-03-06 13:55:35 +0000142 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Aart Bikb13c65b2017-03-21 20:14:07 -0700143 LocationSummary* locations = instruction_->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000144 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000145 __ Bind(GetEntryLabel());
Aart Bikb13c65b2017-03-21 20:14:07 -0700146 SaveLiveRegisters(codegen, locations); // only saves full width XMM for SIMD
Serban Constantinescuba45db02016-07-12 22:53:02 +0100147 x86_64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000148 CheckEntrypointTypes<kQuickTestSuspend, void, void>();
Aart Bikb13c65b2017-03-21 20:14:07 -0700149 RestoreLiveRegisters(codegen, locations); // only saves full width XMM for SIMD
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100150 if (successor_ == nullptr) {
151 __ jmp(GetReturnLabel());
152 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000153 __ jmp(x86_64_codegen->GetLabelOf(successor_));
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100154 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000155 }
156
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100157 Label* GetReturnLabel() {
158 DCHECK(successor_ == nullptr);
159 return &return_label_;
160 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000161
Nicolas Geoffraydb216f42015-05-05 17:02:20 +0100162 HBasicBlock* GetSuccessor() const {
163 return successor_;
164 }
165
Alexandre Rames9931f312015-06-19 14:47:01 +0100166 const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86_64"; }
167
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000168 private:
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100169 HBasicBlock* const successor_;
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000170 Label return_label_;
171
172 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
173};
174
Andreas Gampe85b62f22015-09-09 13:15:38 -0700175class BoundsCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100176 public:
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100177 explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000178 : SlowPathCode(instruction) {}
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100179
Alexandre Rames2ed20af2015-03-06 13:55:35 +0000180 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100181 LocationSummary* locations = instruction_->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000182 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100183 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +0000184 if (instruction_->CanThrowIntoCatchBlock()) {
185 // Live registers will be restored in the catch block if caught.
186 SaveLiveRegisters(codegen, instruction_->GetLocations());
187 }
Mark Mendellee8d9712016-07-12 11:13:15 -0400188 // Are we using an array length from memory?
189 HInstruction* array_length = instruction_->InputAt(1);
190 Location length_loc = locations->InAt(1);
191 InvokeRuntimeCallingConvention calling_convention;
192 if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
193 // Load the array length into our temporary.
194 uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
195 Location array_loc = array_length->GetLocations()->InAt(0);
196 Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
197 length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
198 // Check for conflicts with index.
199 if (length_loc.Equals(locations->InAt(0))) {
200 // We know we aren't using parameter 2.
201 length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
202 }
203 __ movl(length_loc.AsRegister<CpuRegister>(), array_len);
jessicahandojo4877b792016-09-08 19:49:13 -0700204 if (mirror::kUseStringCompression) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +0100205 __ shrl(length_loc.AsRegister<CpuRegister>(), Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -0700206 }
Mark Mendellee8d9712016-07-12 11:13:15 -0400207 }
208
Nicolas Geoffrayf0e39372014-11-12 17:50:07 +0000209 // We're moving two locations to locations that could overlap, so we need a parallel
210 // move resolver.
Nicolas Geoffrayf0e39372014-11-12 17:50:07 +0000211 codegen->EmitParallelMoves(
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100212 locations->InAt(0),
Nicolas Geoffrayf0e39372014-11-12 17:50:07 +0000213 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Nicolas Geoffray90218252015-04-15 11:56:51 +0100214 Primitive::kPrimInt,
Mark Mendellee8d9712016-07-12 11:13:15 -0400215 length_loc,
Nicolas Geoffray90218252015-04-15 11:56:51 +0100216 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
217 Primitive::kPrimInt);
Serban Constantinescuba45db02016-07-12 22:53:02 +0100218 QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
219 ? kQuickThrowStringBounds
220 : kQuickThrowArrayBounds;
221 x86_64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
Vladimir Marko87f3fcb2016-04-28 15:52:11 +0100222 CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
Roland Levillain888d0672015-11-23 18:53:50 +0000223 CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100224 }
225
Alexandre Rames8158f282015-08-07 10:26:17 +0100226 bool IsFatal() const OVERRIDE { return true; }
227
Alexandre Rames9931f312015-06-19 14:47:01 +0100228 const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86_64"; }
229
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100230 private:
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100231 DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
232};
233
Andreas Gampe85b62f22015-09-09 13:15:38 -0700234class LoadClassSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100235 public:
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000236 LoadClassSlowPathX86_64(HLoadClass* cls,
237 HInstruction* at,
238 uint32_t dex_pc,
239 bool do_clinit)
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000240 : SlowPathCode(at), cls_(cls), dex_pc_(dex_pc), do_clinit_(do_clinit) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000241 DCHECK(at->IsLoadClass() || at->IsClinitCheck());
242 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100243
Alexandre Rames2ed20af2015-03-06 13:55:35 +0000244 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000245 LocationSummary* locations = instruction_->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000246 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100247 __ Bind(GetEntryLabel());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100248
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000249 SaveLiveRegisters(codegen, locations);
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000250
Vladimir Markoea4c1262017-02-06 19:59:33 +0000251 // Custom calling convention: RAX serves as both input and output.
252 __ movl(CpuRegister(RAX), Immediate(cls_->GetTypeIndex().index_));
Serban Constantinescuba45db02016-07-12 22:53:02 +0100253 x86_64_codegen->InvokeRuntime(do_clinit_ ? kQuickInitializeStaticStorage : kQuickInitializeType,
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000254 instruction_,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000255 dex_pc_,
256 this);
Roland Levillain888d0672015-11-23 18:53:50 +0000257 if (do_clinit_) {
258 CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
259 } else {
260 CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
261 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100262
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000263 Location out = locations->Out();
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000264 // Move the class to the desired location.
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000265 if (out.IsValid()) {
266 DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
Roland Levillain0d5a2812015-11-13 10:07:31 +0000267 x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000268 }
269
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000270 RestoreLiveRegisters(codegen, locations);
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000271 // For HLoadClass/kBssEntry, store the resolved Class to the BSS entry.
272 DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
273 if (cls_ == instruction_ && cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
274 DCHECK(out.IsValid());
275 __ movl(Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false),
276 locations->Out().AsRegister<CpuRegister>());
277 Label* fixup_label = x86_64_codegen->NewTypeBssEntryPatch(cls_);
278 __ Bind(fixup_label);
279 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100280 __ jmp(GetExitLabel());
281 }
282
Alexandre Rames9931f312015-06-19 14:47:01 +0100283 const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86_64"; }
284
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100285 private:
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000286 // The class this slow path will load.
287 HLoadClass* const cls_;
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100288
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000289 // The dex PC of `at_`.
290 const uint32_t dex_pc_;
291
292 // Whether to initialize the class.
293 const bool do_clinit_;
294
295 DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100296};
297
Vladimir Markoaad75c62016-10-03 08:46:48 +0000298class LoadStringSlowPathX86_64 : public SlowPathCode {
299 public:
300 explicit LoadStringSlowPathX86_64(HLoadString* instruction) : SlowPathCode(instruction) {}
301
302 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
303 LocationSummary* locations = instruction_->GetLocations();
304 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
305
306 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
307 __ Bind(GetEntryLabel());
308 SaveLiveRegisters(codegen, locations);
309
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000310 const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
Vladimir Marko94ce9c22016-09-30 14:50:51 +0100311 // Custom calling convention: RAX serves as both input and output.
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000312 __ movl(CpuRegister(RAX), Immediate(string_index.index_));
Vladimir Markoaad75c62016-10-03 08:46:48 +0000313 x86_64_codegen->InvokeRuntime(kQuickResolveString,
314 instruction_,
315 instruction_->GetDexPc(),
316 this);
317 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
318 x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
319 RestoreLiveRegisters(codegen, locations);
320
321 // Store the resolved String to the BSS entry.
322 __ movl(Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false),
323 locations->Out().AsRegister<CpuRegister>());
324 Label* fixup_label = x86_64_codegen->NewStringBssEntryPatch(instruction_->AsLoadString());
325 __ Bind(fixup_label);
326
327 __ jmp(GetExitLabel());
328 }
329
330 const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathX86_64"; }
331
332 private:
333 DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
334};
335
Andreas Gampe85b62f22015-09-09 13:15:38 -0700336class TypeCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000337 public:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000338 TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
David Srbecky9cd6d372016-02-09 15:24:47 +0000339 : SlowPathCode(instruction), is_fatal_(is_fatal) {}
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000340
Alexandre Rames2ed20af2015-03-06 13:55:35 +0000341 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000342 LocationSummary* locations = instruction_->GetLocations();
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100343 uint32_t dex_pc = instruction_->GetDexPc();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +0000344 DCHECK(instruction_->IsCheckCast()
345 || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000346
Roland Levillain0d5a2812015-11-13 10:07:31 +0000347 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000348 __ Bind(GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000349
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000350 if (!is_fatal_) {
351 SaveLiveRegisters(codegen, locations);
352 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000353
354 // We're moving two locations to locations that could overlap, so we need a parallel
355 // move resolver.
356 InvokeRuntimeCallingConvention calling_convention;
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800357 codegen->EmitParallelMoves(locations->InAt(0),
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800358 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
359 Primitive::kPrimNot,
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800360 locations->InAt(1),
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800361 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
362 Primitive::kPrimNot);
Nicolas Geoffray57a88d42014-11-10 15:09:21 +0000363 if (instruction_->IsInstanceOf()) {
Serban Constantinescuba45db02016-07-12 22:53:02 +0100364 x86_64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800365 CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +0000366 } else {
367 DCHECK(instruction_->IsCheckCast());
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800368 x86_64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
369 CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +0000370 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +0000371
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000372 if (!is_fatal_) {
373 if (instruction_->IsInstanceOf()) {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000374 x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000375 }
Nicolas Geoffray75374372015-09-17 17:12:19 +0000376
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000377 RestoreLiveRegisters(codegen, locations);
378 __ jmp(GetExitLabel());
379 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000380 }
381
Alexandre Rames9931f312015-06-19 14:47:01 +0100382 const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86_64"; }
383
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000384 bool IsFatal() const OVERRIDE { return is_fatal_; }
385
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000386 private:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000387 const bool is_fatal_;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000388
389 DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
390};
391
Andreas Gampe85b62f22015-09-09 13:15:38 -0700392class DeoptimizationSlowPathX86_64 : public SlowPathCode {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700393 public:
Aart Bik42249c32016-01-07 15:33:50 -0800394 explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000395 : SlowPathCode(instruction) {}
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700396
397 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000398 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700399 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +0100400 x86_64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000401 CheckEntrypointTypes<kQuickDeoptimize, void, void>();
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700402 }
403
Alexandre Rames9931f312015-06-19 14:47:01 +0100404 const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86_64"; }
405
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700406 private:
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700407 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
408};
409
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100410class ArraySetSlowPathX86_64 : public SlowPathCode {
411 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000412 explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100413
414 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
415 LocationSummary* locations = instruction_->GetLocations();
416 __ Bind(GetEntryLabel());
417 SaveLiveRegisters(codegen, locations);
418
419 InvokeRuntimeCallingConvention calling_convention;
420 HParallelMove parallel_move(codegen->GetGraph()->GetArena());
421 parallel_move.AddMove(
422 locations->InAt(0),
423 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
424 Primitive::kPrimNot,
425 nullptr);
426 parallel_move.AddMove(
427 locations->InAt(1),
428 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
429 Primitive::kPrimInt,
430 nullptr);
431 parallel_move.AddMove(
432 locations->InAt(2),
433 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
434 Primitive::kPrimNot,
435 nullptr);
436 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
437
Roland Levillain0d5a2812015-11-13 10:07:31 +0000438 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Serban Constantinescuba45db02016-07-12 22:53:02 +0100439 x86_64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000440 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100441 RestoreLiveRegisters(codegen, locations);
442 __ jmp(GetExitLabel());
443 }
444
445 const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86_64"; }
446
447 private:
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100448 DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
449};
450
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100451// Slow path marking an object reference `ref` during a read
452// barrier. The field `obj.field` in the object `obj` holding this
453// reference does not get updated by this slow path after marking (see
454// ReadBarrierMarkAndUpdateFieldSlowPathX86_64 below for that).
455//
456// This means that after the execution of this slow path, `ref` will
457// always be up-to-date, but `obj.field` may not; i.e., after the
458// flip, `ref` will be a to-space reference, but `obj.field` will
459// probably still be a from-space reference (unless it gets updated by
460// another thread, or if another thread installed another object
461// reference (different from `ref`) in `obj.field`).
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000462class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
463 public:
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100464 ReadBarrierMarkSlowPathX86_64(HInstruction* instruction,
465 Location ref,
466 bool unpoison_ref_before_marking)
467 : SlowPathCode(instruction),
468 ref_(ref),
469 unpoison_ref_before_marking_(unpoison_ref_before_marking) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000470 DCHECK(kEmitCompilerReadBarrier);
471 }
472
473 const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86_64"; }
474
475 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
476 LocationSummary* locations = instruction_->GetLocations();
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100477 CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
478 Register ref_reg = ref_cpu_reg.AsRegister();
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000479 DCHECK(locations->CanCall());
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100480 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000481 DCHECK(instruction_->IsInstanceFieldGet() ||
482 instruction_->IsStaticFieldGet() ||
483 instruction_->IsArrayGet() ||
Roland Levillain16d9f942016-08-25 17:27:56 +0100484 instruction_->IsArraySet() ||
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000485 instruction_->IsLoadClass() ||
486 instruction_->IsLoadString() ||
487 instruction_->IsInstanceOf() ||
Roland Levillain3d312422016-06-23 13:53:42 +0100488 instruction_->IsCheckCast() ||
Roland Levillain0b671c02016-08-19 12:02:34 +0100489 (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
490 (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000491 << "Unexpected instruction in read barrier marking slow path: "
492 << instruction_->DebugName();
493
494 __ Bind(GetEntryLabel());
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100495 if (unpoison_ref_before_marking_) {
Vladimir Marko953437b2016-08-24 08:30:46 +0000496 // Object* ref = ref_addr->AsMirrorPtr()
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100497 __ MaybeUnpoisonHeapReference(ref_cpu_reg);
Vladimir Marko953437b2016-08-24 08:30:46 +0000498 }
Roland Levillain4359e612016-07-20 11:32:19 +0100499 // No need to save live registers; it's taken care of by the
500 // entrypoint. Also, there is no need to update the stack mask,
501 // as this runtime call will not trigger a garbage collection.
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000502 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100503 DCHECK_NE(ref_reg, RSP);
504 DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
Roland Levillain02b75802016-07-13 11:54:35 +0100505 // "Compact" slow path, saving two moves.
506 //
507 // Instead of using the standard runtime calling convention (input
508 // and output in R0):
509 //
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100510 // RDI <- ref
Roland Levillain02b75802016-07-13 11:54:35 +0100511 // RAX <- ReadBarrierMark(RDI)
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100512 // ref <- RAX
Roland Levillain02b75802016-07-13 11:54:35 +0100513 //
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100514 // we just use rX (the register containing `ref`) as input and output
Roland Levillain02b75802016-07-13 11:54:35 +0100515 // of a dedicated entrypoint:
516 //
517 // rX <- ReadBarrierMarkRegX(rX)
518 //
519 int32_t entry_point_offset =
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100520 CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
Roland Levillaindec8f632016-07-22 17:10:06 +0100521 // This runtime call does not require a stack map.
522 x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000523 __ jmp(GetExitLabel());
524 }
525
526 private:
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100527 // The location (register) of the marked object reference.
528 const Location ref_;
529 // Should the reference in `ref_` be unpoisoned prior to marking it?
530 const bool unpoison_ref_before_marking_;
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000531
532 DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
533};
534
// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathX86_64 above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierMarkAndUpdateFieldSlowPathX86_64(HInstruction* instruction,
                                              Location ref,
                                              CpuRegister obj,
                                              const Address& field_addr,
                                              bool unpoison_ref_before_marking,
                                              CpuRegister temp1,
                                              CpuRegister temp2)
      : SlowPathCode(instruction),
        ref_(ref),
        obj_(obj),
        field_addr_(field_addr),
        unpoison_ref_before_marking_(unpoison_ref_before_marking),
        temp1_(temp1),
        temp2_(temp2) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierMarkAndUpdateFieldSlowPathX86_64";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
    Register ref_reg = ref_cpu_reg.AsRegister();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_cpu_reg);
    }

    // Save the old (unpoisoned) reference.
    __ movl(temp1_, ref_cpu_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(ref_reg, RSP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in R0):
    //
    //   RDI <- ref
    //   RAX <- ReadBarrierMark(RDI)
    //   ref <- RAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*field_addr`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // LOCK CMPXCHGL instruction in the compare-and-set (CAS)
    // operation below would abort the CAS, leaving the field as-is.
    NearLabel done;
    __ cmpl(temp1_, ref_cpu_reg);
    __ j(kEqual, &done);

    // Update the holder's field atomically. This may fail if the
    // mutator updates before us, but it's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.
    // This operation is implemented with a 32-bit LOCK CMPXCHGL
    // instruction, which requires the expected value (the old
    // reference) to be in EAX. Save RAX beforehand, and move the
    // expected value (stored in `temp1_`) into EAX.
    __ movq(temp2_, CpuRegister(RAX));
    __ movl(CpuRegister(RAX), temp1_);

    // Convenience aliases.
    CpuRegister base = obj_;
    CpuRegister expected = CpuRegister(RAX);
    CpuRegister value = ref_cpu_reg;

    bool base_equals_value = (base.AsRegister() == value.AsRegister());
    Register value_reg = ref_reg;
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // If `base` and `value` are the same register location, move
        // `value_reg` to a temporary register. This way, poisoning
        // `value_reg` won't invalidate `base`.
        value_reg = temp1_.AsRegister();
        __ movl(CpuRegister(value_reg), base);
      }

      // Check that the register allocator did not assign the location
      // of `expected` (RAX) to `value` nor to `base`, so that heap
      // poisoning (when enabled) works as intended below.
      // - If `value` were equal to `expected`, both references would
      //   be poisoned twice, meaning they would not be poisoned at
      //   all, as heap poisoning uses address negation.
      // - If `base` were equal to `expected`, poisoning `expected`
      //   would invalidate `base`.
      DCHECK_NE(value_reg, expected.AsRegister());
      DCHECK_NE(base.AsRegister(), expected.AsRegister());

      __ PoisonHeapReference(expected);
      __ PoisonHeapReference(CpuRegister(value_reg));
    }

    __ LockCmpxchgl(field_addr_, CpuRegister(value_reg));

    // If heap poisoning is enabled, we need to unpoison the values
    // that were poisoned earlier.
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // `value_reg` has been moved to a temporary register, no need
        // to unpoison it.
      } else {
        __ UnpoisonHeapReference(CpuRegister(value_reg));
      }
      // No need to unpoison `expected` (RAX), as it is overwritten below.
    }

    // Restore RAX.
    __ movq(CpuRegister(RAX), temp2_);

    __ Bind(&done);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const CpuRegister obj_;
  // The address of the marked reference field. The base of this address must be `obj_`.
  const Address field_addr_;

  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  // Scratch registers: `temp1_` holds the saved (old) reference across the
  // marking call; `temp2_` is used to preserve RAX around the CAS.
  const CpuRegister temp1_;
  const CpuRegister temp2_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathX86_64);
};
706
Roland Levillain0d5a2812015-11-13 10:07:31 +0000707// Slow path generating a read barrier for a heap reference.
708class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
709 public:
710 ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
711 Location out,
712 Location ref,
713 Location obj,
714 uint32_t offset,
715 Location index)
David Srbecky9cd6d372016-02-09 15:24:47 +0000716 : SlowPathCode(instruction),
Roland Levillain0d5a2812015-11-13 10:07:31 +0000717 out_(out),
718 ref_(ref),
719 obj_(obj),
720 offset_(offset),
721 index_(index) {
722 DCHECK(kEmitCompilerReadBarrier);
723 // If `obj` is equal to `out` or `ref`, it means the initial
724 // object has been overwritten by (or after) the heap object
725 // reference load to be instrumented, e.g.:
726 //
727 // __ movl(out, Address(out, offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000728 // codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +0000729 //
730 // In that case, we have lost the information about the original
731 // object, and the emitted read barrier cannot work properly.
732 DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
733 DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
734}
735
736 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
737 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
738 LocationSummary* locations = instruction_->GetLocations();
739 CpuRegister reg_out = out_.AsRegister<CpuRegister>();
740 DCHECK(locations->CanCall());
741 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
Roland Levillain3d312422016-06-23 13:53:42 +0100742 DCHECK(instruction_->IsInstanceFieldGet() ||
743 instruction_->IsStaticFieldGet() ||
744 instruction_->IsArrayGet() ||
745 instruction_->IsInstanceOf() ||
746 instruction_->IsCheckCast() ||
Roland Levillaindec8f632016-07-22 17:10:06 +0100747 (instruction_->IsInvokeVirtual()) && instruction_->GetLocations()->Intrinsified())
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000748 << "Unexpected instruction in read barrier for heap reference slow path: "
749 << instruction_->DebugName();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000750
751 __ Bind(GetEntryLabel());
752 SaveLiveRegisters(codegen, locations);
753
754 // We may have to change the index's value, but as `index_` is a
755 // constant member (like other "inputs" of this slow path),
756 // introduce a copy of it, `index`.
757 Location index = index_;
758 if (index_.IsValid()) {
Roland Levillain3d312422016-06-23 13:53:42 +0100759 // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
Roland Levillain0d5a2812015-11-13 10:07:31 +0000760 if (instruction_->IsArrayGet()) {
761 // Compute real offset and store it in index_.
762 Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
763 DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
764 if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
765 // We are about to change the value of `index_reg` (see the
766 // calls to art::x86_64::X86_64Assembler::shll and
767 // art::x86_64::X86_64Assembler::AddImmediate below), but it
768 // has not been saved by the previous call to
769 // art::SlowPathCode::SaveLiveRegisters, as it is a
770 // callee-save register --
771 // art::SlowPathCode::SaveLiveRegisters does not consider
772 // callee-save registers, as it has been designed with the
773 // assumption that callee-save registers are supposed to be
774 // handled by the called function. So, as a callee-save
775 // register, `index_reg` _would_ eventually be saved onto
776 // the stack, but it would be too late: we would have
777 // changed its value earlier. Therefore, we manually save
778 // it here into another freely available register,
779 // `free_reg`, chosen of course among the caller-save
780 // registers (as a callee-save `free_reg` register would
781 // exhibit the same problem).
782 //
783 // Note we could have requested a temporary register from
784 // the register allocator instead; but we prefer not to, as
785 // this is a slow path, and we know we can find a
786 // caller-save register that is available.
787 Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
788 __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
789 index_reg = free_reg;
790 index = Location::RegisterLocation(index_reg);
791 } else {
792 // The initial register stored in `index_` has already been
793 // saved in the call to art::SlowPathCode::SaveLiveRegisters
794 // (as it is not a callee-save register), so we can freely
795 // use it.
796 }
797 // Shifting the index value contained in `index_reg` by the
798 // scale factor (2) cannot overflow in practice, as the
799 // runtime is unable to allocate object arrays with a size
800 // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
801 __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
802 static_assert(
803 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
804 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
805 __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
806 } else {
Roland Levillain3d312422016-06-23 13:53:42 +0100807 // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
808 // intrinsics, `index_` is not shifted by a scale factor of 2
809 // (as in the case of ArrayGet), as it is actually an offset
810 // to an object field within an object.
811 DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000812 DCHECK(instruction_->GetLocations()->Intrinsified());
813 DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
814 (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
815 << instruction_->AsInvoke()->GetIntrinsic();
816 DCHECK_EQ(offset_, 0U);
817 DCHECK(index_.IsRegister());
818 }
819 }
820
821 // We're moving two or three locations to locations that could
822 // overlap, so we need a parallel move resolver.
823 InvokeRuntimeCallingConvention calling_convention;
824 HParallelMove parallel_move(codegen->GetGraph()->GetArena());
825 parallel_move.AddMove(ref_,
826 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
827 Primitive::kPrimNot,
828 nullptr);
829 parallel_move.AddMove(obj_,
830 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
831 Primitive::kPrimNot,
832 nullptr);
833 if (index.IsValid()) {
834 parallel_move.AddMove(index,
835 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
836 Primitive::kPrimInt,
837 nullptr);
838 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
839 } else {
840 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
841 __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
842 }
Serban Constantinescuba45db02016-07-12 22:53:02 +0100843 x86_64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000844 instruction_,
845 instruction_->GetDexPc(),
846 this);
847 CheckEntrypointTypes<
848 kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
849 x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));
850
851 RestoreLiveRegisters(codegen, locations);
852 __ jmp(GetExitLabel());
853 }
854
855 const char* GetDescription() const OVERRIDE {
856 return "ReadBarrierForHeapReferenceSlowPathX86_64";
857 }
858
859 private:
860 CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
861 size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
862 size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
863 for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
864 if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
865 return static_cast<CpuRegister>(i);
866 }
867 }
868 // We shall never fail to find a free caller-save register, as
869 // there are more than two core caller-save registers on x86-64
870 // (meaning it is possible to find one which is different from
871 // `ref` and `obj`).
872 DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
873 LOG(FATAL) << "Could not find a free caller-save register";
874 UNREACHABLE();
875 }
876
Roland Levillain0d5a2812015-11-13 10:07:31 +0000877 const Location out_;
878 const Location ref_;
879 const Location obj_;
880 const uint32_t offset_;
881 // An additional location containing an index to an array.
882 // Only used for HArrayGet and the UnsafeGetObject &
883 // UnsafeGetObjectVolatile intrinsics.
884 const Location index_;
885
886 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
887};
888
// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    // GC-root read barriers are only emitted for class and string loads.
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Pass the root in the first runtime argument register, call the
    // kQuickReadBarrierForRootSlow entrypoint, and move its result
    // (returned in RAX) to `out_`.
    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86_64"; }

 private:
  // The location where the result of the read barrier is stored.
  const Location out_;
  // The location of the GC root to pass to the runtime entrypoint.
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
};
930
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100931#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100932// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
933#define __ down_cast<X86_64Assembler*>(GetAssembler())-> // NOLINT
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100934
// Maps integer condition to x86_64 name.
inline Condition X86_64IntegerCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kLess;
    case kCondLE: return kLessEqual;
    case kCondGT: return kGreater;
    case kCondGE: return kGreaterEqual;
    // Unsigned conditions.
    case kCondB: return kBelow;
    case kCondBE: return kBelowEqual;
    case kCondA: return kAbove;
    case kCondAE: return kAboveEqual;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}
951
// Maps FP condition to x86_64 name. Note that the signed comparisons are
// mapped onto the "unsigned" condition codes (below/above); the unsigned
// IfConditions (kCondB & co.) are not expected here and hit the fatal default.
inline Condition X86_64FPCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kBelow;
    case kCondLE: return kBelowEqual;
    case kCondGT: return kAbove;
    case kCondGE: return kAboveEqual;
    default: break;  // should not happen
  };
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}
966
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
  // On x86-64 the desired dispatch info is returned unchanged: no fallback
  // dispatch kind is ever needed.
  return desired_dispatch_info;
}
972
// Materializes the callee ArtMethod* for a static or direct call according
// to the invoke's method load kind, and returns the location holding it
// (usually `temp`, except for kRecursive).
Location CodeGeneratorX86_64::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kX86_64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ gs()->movq(temp.AsRegister<CpuRegister>(), Address::Absolute(offset, /* no_rip */ true));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // The callee is already available in the invoke's special input;
      // no load needs to be emitted.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      // The method address is known at compile time; load it as a 64-bit immediate.
      Load64BitValue(temp.AsRegister<CpuRegister>(), invoke->GetMethodAddress());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
      // Emit a RIP-relative load with a dummy displacement to be fixed up at link time.
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
      // Bind a new fixup label at the end of the "movl" insn.
      uint32_t offset = invoke->GetDexCacheArrayOffset();
      __ Bind(NewPcRelativeDexCacheArrayPatch(invoke->GetDexFileForPcRelativeDexCache(), offset));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register method_reg;
      CpuRegister reg = temp.AsRegister<CpuRegister>();
      if (current_method.IsRegister()) {
        method_reg = current_method.AsRegister<Register>();
      } else {
        // Intrinsified invokes do not carry the current method in a register;
        // reload it from its stack slot into `reg`.
        DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg.AsRegister();
        __ movq(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
      }
      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      __ movq(reg,
              Address(CpuRegister(method_reg),
                      ArtMethod::DexCacheResolvedMethodsOffset(kX86_64PointerSize).SizeValue()));
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      __ movq(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache)));
      break;
    }
  }
  return callee_method;
}
1024
// Emits the call instruction for a static or direct invoke, after loading
// the callee method via GenerateCalleeMethodStaticOrDirectCall().
void CodeGeneratorX86_64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = GenerateCalleeMethodStaticOrDirectCall(invoke, temp);

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Recursive call: call this method's own frame entry directly.
      __ call(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64PointerSize).SizeValue()));
      break;
  }

  DCHECK(!IsLeafMethod());
}
1044
// Emits a virtual call: loads the receiver's class into `temp_in`, fetches
// the target ArtMethod* from the embedded vtable, and calls its quick
// compiled-code entrypoint.
void CodeGeneratorX86_64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
  CpuRegister temp = temp_in.AsRegister<CpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ movl(temp, Address(CpuRegister(receiver), class_offset));
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86_64PointerSize).SizeValue()));
}
1075
// Records a link-time patch for a string loaded from the boot image, and
// binds the patch label right after the instruction to be patched.
void CodeGeneratorX86_64::RecordBootStringPatch(HLoadString* load_string) {
  DCHECK(GetCompilerOptions().IsBootImage());
  string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex().index_);
  __ Bind(&string_patches_.back().label);
}
1081
Vladimir Marko1998cd02017-01-13 13:02:58 +00001082void CodeGeneratorX86_64::RecordBootTypePatch(HLoadClass* load_class) {
1083 boot_image_type_patches_.emplace_back(load_class->GetDexFile(),
1084 load_class->GetTypeIndex().index_);
1085 __ Bind(&boot_image_type_patches_.back().label);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001086}
1087
// Records a patch for a type loaded through its .bss entry; returns the
// label to bind right after the instruction to be patched.
Label* CodeGeneratorX86_64::NewTypeBssEntryPatch(HLoadClass* load_class) {
  type_bss_entry_patches_.emplace_back(load_class->GetDexFile(), load_class->GetTypeIndex().index_);
  return &type_bss_entry_patches_.back().label;
}
1092
// Records a patch for a string loaded through its .bss entry (app
// compilation only); returns the label to bind right after the instruction
// to be patched.
Label* CodeGeneratorX86_64::NewStringBssEntryPatch(HLoadString* load_string) {
  DCHECK(!GetCompilerOptions().IsBootImage());
  string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex().index_);
  return &string_patches_.back().label;
}
1098
// Records a PC-relative patch for the dex cache array element at
// `element_offset` within `dex_file`.
Label* CodeGeneratorX86_64::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
                                                            uint32_t element_offset) {
  // Add a patch entry and return the label.
  pc_relative_dex_cache_patches_.emplace_back(dex_file, element_offset);
  return &pc_relative_dex_cache_patches_.back().label;
}
1105
// The label points to the end of the "movl" or another instruction but the literal offset
// for method patch needs to point to the embedded constant which occupies the last 4 bytes.
constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;

// Converts each recorded patch info in `infos` into a LinkerPatch built by
// `Factory`, adjusting the bound label position to the literal's offset,
// and appends them to `linker_patches`.
template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorX86_64::EmitPcRelativeLinkerPatches(
    const ArenaDeque<PatchInfo<Label>>& infos,
    ArenaVector<LinkerPatch>* linker_patches) {
  for (const PatchInfo<Label>& info : infos) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(
        Factory(literal_offset, &info.dex_file, info.label.Position(), info.index));
  }
}
1120
// Collects all recorded patches (dex cache arrays, strings, types) into
// `linker_patches`. The patch kinds chosen for strings and boot-image types
// depend on whether we are compiling the boot image.
void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      pc_relative_dex_cache_patches_.size() +
      string_patches_.size() +
      boot_image_type_patches_.size() +
      type_bss_entry_patches_.size();
  linker_patches->reserve(size);
  EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
                                                               linker_patches);
  if (!GetCompilerOptions().IsBootImage()) {
    // App compilation: strings go through .bss entries and boot-image type
    // patches must not have been recorded.
    DCHECK(boot_image_type_patches_.empty());
    EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_patches_, linker_patches);
  } else {
    // These are always PC-relative, see GetSupportedLoadClassKind()/GetSupportedLoadStringKind().
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(boot_image_type_patches_,
                                                                linker_patches);
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(string_patches_, linker_patches);
  }
  EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
                                                              linker_patches);
  DCHECK_EQ(size, linker_patches->size());
}
1144
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001145void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001146 stream << Register(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001147}
1148
1149void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001150 stream << FloatRegister(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001151}
1152
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001153size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1154 __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
1155 return kX86_64WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001156}
1157
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001158size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1159 __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
1160 return kX86_64WordSize;
1161}
1162
1163size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Aart Bikb13c65b2017-03-21 20:14:07 -07001164 if (GetGraph()->HasSIMD()) {
Aart Bik5576f372017-03-23 16:17:37 -07001165 __ movups(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
Aart Bikb13c65b2017-03-21 20:14:07 -07001166 } else {
1167 __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
1168 }
1169 return GetFloatingPointSpillSlotSize();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001170}
1171
1172size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Aart Bikb13c65b2017-03-21 20:14:07 -07001173 if (GetGraph()->HasSIMD()) {
Aart Bik5576f372017-03-23 16:17:37 -07001174 __ movups(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
Aart Bikb13c65b2017-03-21 20:14:07 -07001175 } else {
1176 __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
1177 }
1178 return GetFloatingPointSpillSlotSize();
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001179}
1180
// Emits a call to the given quick runtime entrypoint through the thread-local
// entrypoint table. `instruction`/`dex_pc`/`slow_path` feed the stack map that
// is recorded right after the call, when the entrypoint requires one.
void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  // Debug-mode sanity checks on the instruction/slow-path/entrypoint pairing.
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  GenerateInvokeRuntime(GetThreadOffset<kX86_64PointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    // The PC info must be recorded after the call instruction is emitted so the
    // stack map points at the return address.
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
1191
// Emits a runtime call that deliberately records no stack map — used for calls
// whose PC info is covered elsewhere (validated by the call below).
void CodeGeneratorX86_64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                              HInstruction* instruction,
                                                              SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}
1198
// Emits the actual call instruction: an absolute (non-RIP-relative) indirect
// call through the GS segment, i.e. an entry in the current Thread's
// entrypoint table at `entry_point_offset`.
void CodeGeneratorX86_64::GenerateInvokeRuntime(int32_t entry_point_offset) {
  __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip */ true));
}
1202
// x86-64 never needs register pairs: 64-bit values fit in a single register.
static constexpr int kNumberOfCpuRegisterPairs = 0;
// Use a fake return address register to mimic Quick.
// It is one past the last real CPU register, so it never clashes with an
// allocatable register; it only reserves the return-address stack slot.
static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
// Constructs the x86-64 code generator. The base-class blocked-register mask
// includes the callee-saved core/FPU registers plus the fake return-address
// register; all patch containers are arena-allocated alongside the graph.
CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
                                         const X86_64InstructionSetFeatures& isa_features,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCpuRegisters,
                    kNumberOfFloatRegisters,
                    kNumberOfCpuRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves))
                        | (1 << kFakeReturnRegister),  // Reserve the return-address slot.
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features),
      constant_area_start_(0),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Make the fake return register visible to the register allocator bookkeeping.
  AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001237
// Instruction visitor that emits x86-64 code; shares the assembler owned by
// the enclosing code generator.
InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
                                                               CodeGeneratorX86_64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}
1243
David Brazdil58282f42016-01-14 12:45:10 +00001244void CodeGeneratorX86_64::SetupBlockedRegisters() const {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001245 // Stack register is always reserved.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001246 blocked_core_registers_[RSP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001247
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001248 // Block the register used as TMP.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001249 blocked_core_registers_[TMP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001250}
1251
// Maps a core register to its DWARF register number for CFI emission.
static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::X86_64Core(static_cast<int>(reg));
}

// Maps an XMM register to its DWARF register number for CFI emission.
static dwarf::Reg DWARFReg(FloatRegister reg) {
  return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
}
1259
// Emits the method prologue: optional implicit stack-overflow probe, pushes of
// allocated callee-saved core registers, the frame-size SP adjustment, spills
// of allocated callee-saved XMM registers, the should_deoptimize flag, and the
// current-method store. CFI directives are interleaved so unwinding info stays
// in sync with every SP change.
void CodeGeneratorX86_64::GenerateFrameEntry() {
  __ cfi().SetCurrentCFAOffset(kX86_64WordSize);  // return address
  __ Bind(&frame_entry_label_);
  bool skip_overflow_check = IsLeafMethod()
      && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
  // Only the implicit (fault-based) overflow check scheme is supported here.
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());

  if (!skip_overflow_check) {
    // Implicit probe: touching below RSP faults if the stack is exhausted; the
    // fault handler uses the recorded PC info to raise StackOverflowError.
    __ testq(CpuRegister(RAX), Address(
        CpuRegister(RSP), -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86_64))));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    return;
  }

  // Push callee-saved core registers from highest index to lowest.
  for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
    Register reg = kCoreCalleeSaves[i];
    if (allocated_registers_.ContainsCoreRegister(reg)) {
      __ pushq(CpuRegister(reg));
      __ cfi().AdjustCFAOffset(kX86_64WordSize);
      __ cfi().RelOffset(DWARFReg(reg), 0);
    }
  }

  // Allocate the rest of the frame (everything not covered by the pushes).
  int adjust = GetFrameSize() - GetCoreSpillSize();
  __ subq(CpuRegister(RSP), Immediate(adjust));
  __ cfi().AdjustCFAOffset(adjust);
  uint32_t xmm_spill_location = GetFpuSpillStart();
  size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();

  // Spill callee-saved XMM registers into their dedicated slots.
  for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
    if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
      int offset = xmm_spill_location + (xmm_spill_slot_size * i);
      __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
      __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
    }
  }

  if (GetGraph()->HasShouldDeoptimizeFlag()) {
    // Initialize should_deoptimize flag to 0.
    __ movl(Address(CpuRegister(RSP), xmm_spill_location - kShouldDeoptimizeFlagSize),
            Immediate(0));
  }

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
            CpuRegister(kMethodRegisterArgument));
  }
}
1314
// Emits the method epilogue, mirroring GenerateFrameEntry in reverse: restore
// XMM spills, undo the SP adjustment, pop callee-saved core registers, return.
// The CFI state is remembered/restored so code after this epilogue (e.g. slow
// paths) keeps the mid-method unwind description.
void CodeGeneratorX86_64::GenerateFrameExit() {
  __ cfi().RememberState();
  if (!HasEmptyFrame()) {
    uint32_t xmm_spill_location = GetFpuSpillStart();
    size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
    // Reload callee-saved XMM registers from their spill slots.
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
        int offset = xmm_spill_location + (xmm_spill_slot_size * i);
        __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
        __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
      }
    }

    // Deallocate the part of the frame that was created with subq.
    int adjust = GetFrameSize() - GetCoreSpillSize();
    __ addq(CpuRegister(RSP), Immediate(adjust));
    __ cfi().AdjustCFAOffset(-adjust);

    // Pop callee-saved core registers in the opposite order of the prologue.
    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
      Register reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        __ popq(CpuRegister(reg));
        __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
        __ cfi().Restore(DWARFReg(reg));
      }
    }
  }
  __ ret();
  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1345
// Binds the assembler label of `block` at the current code position.
void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
1349
// Moves a value from `source` to `destination`, selecting the instruction by
// the kinds of the two locations (register, FP register, 32-bit stack slot,
// 64-bit stack slot, constant). Stack-to-stack moves go through the reserved
// TMP register. No-op when source equals destination.
void CodeGeneratorX86_64::Move(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    CpuRegister dest = destination.AsRegister<CpuRegister>();
    if (source.IsRegister()) {
      __ movq(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      // movd transfers the low bits of the XMM register to the core register.
      __ movd(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsStackSlot()) {
      __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      if (constant->IsLongConstant()) {
        Load64BitValue(dest, constant->AsLongConstant()->GetValue());
      } else {
        Load32BitValue(dest, GetInt32ValueOf(constant));
      }
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsFpuRegister()) {
    XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
    if (source.IsRegister()) {
      __ movd(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movaps(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      // FP constants are materialized through their raw bit pattern.
      HConstant* constant = source.GetConstant();
      int64_t value = CodeGenerator::GetInt64ValueOf(constant);
      if (constant->IsFloatConstant()) {
        Load32BitValue(dest, static_cast<int32_t>(value));
      } else {
        Load64BitValue(dest, value);
      }
    } else if (source.IsStackSlot()) {
      __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsStackSlot()) {
    // 32-bit stack slot destination.
    if (source.IsRegister()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      int32_t value = GetInt32ValueOf(constant);
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
    } else {
      DCHECK(source.IsStackSlot()) << source;
      // Stack-to-stack: bounce through the reserved TMP register.
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    // 64-bit stack slot destination.
    if (source.IsRegister()) {
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      DCHECK(constant->IsLongConstant() || constant->IsDoubleConstant());
      int64_t value = GetInt64ValueOf(constant);
      Store64BitValueToStack(destination, value);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  }
}
1429
Calin Juravle175dc732015-08-25 15:42:32 +01001430void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
1431 DCHECK(location.IsRegister());
1432 Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
1433}
1434
// Type-agnostic location move: x86-64 needs no type information, so this just
// forwards to Move() and ignores `dst_type`.
void CodeGeneratorX86_64::MoveLocation(
    Location dst, Location src, Primitive::Type dst_type ATTRIBUTE_UNUSED) {
  Move(dst, src);
}
1439
1440void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1441 if (location.IsRegister()) {
1442 locations->AddTemp(location);
1443 } else {
1444 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1445 }
1446}
1447
// Emits the control transfer for a goto/try-boundary to `successor`, inserting
// suspend checks where required: at loop back edges (replacing the jump) and
// after the entry block (before falling into the body).
void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    // Back edge with a suspend check: the check emits the branch itself.
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    // Method-entry suspend check; nullptr successor means fall through.
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ jmp(codegen_->GetLabelOf(successor));
  }
}
1467
// HGoto needs no operands or outputs.
void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

// Unconditional branch: delegate to the shared goto handler.
void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}

// HTryBoundary needs no operands or outputs.
void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}

// Try boundary: branch to the normal-flow successor unless it is the exit
// block (in which case no code is needed).
void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
  HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
  if (!successor->IsExitBlock()) {
    HandleGoto(try_boundary, successor);
  }
}
1486
// HExit needs no operands or outputs.
void LocationsBuilderX86_64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

// The exit block generates no code.
void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
1493
// Emits the conditional jumps for a floating-point comparison whose flags have
// already been set (by ucomiss/ucomisd). An unordered result (NaN operand) is
// routed to the true or false target first, depending on the condition's NaN
// semantics, before testing the ordinary condition.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
                                                     LabelType* true_label,
                                                     LabelType* false_label) {
  if (cond->IsFPConditionTrueIfNaN()) {
    __ j(kUnordered, true_label);
  } else if (cond->IsFPConditionFalseIfNaN()) {
    __ j(kUnordered, false_label);
  }
  __ j(X86_64FPCondition(cond->GetCondition()), true_label);
}
1505
// Emits the comparison that sets the condition codes for `condition`, chosen
// by the type of the inputs: integer/reference use cmp/test via the shared
// helpers, long uses the 64-bit variant, and float/double use ucomiss/ucomisd
// with register, constant-pool or stack-slot right-hand sides.
void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
  LocationSummary* locations = condition->GetLocations();

  Location left = locations->InAt(0);
  Location right = locations->InAt(1);
  Primitive::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case Primitive::kPrimLong: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case Primitive::kPrimFloat: {
      if (right.IsFpuRegister()) {
        __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        // Compare against the constant materialized in the constant area.
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralFloatAddress(
                       right.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(right.IsStackSlot());
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    case Primitive::kPrimDouble: {
      if (right.IsFpuRegister()) {
        __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        // Compare against the constant materialized in the constant area.
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralDoubleAddress(
                       right.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(right.IsDoubleStackSlot());
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected condition type " << type;
  }
}
1558
1559template<class LabelType>
1560void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
1561 LabelType* true_target_in,
1562 LabelType* false_target_in) {
1563 // Generated branching requires both targets to be explicit. If either of the
1564 // targets is nullptr (fallthrough) use and bind `fallthrough_target` instead.
1565 LabelType fallthrough_target;
1566 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1567 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
1568
1569 // Generate the comparison to set the CC.
1570 GenerateCompareTest(condition);
1571
1572 // Now generate the correct jump(s).
1573 Primitive::Type type = condition->InputAt(0)->GetType();
1574 switch (type) {
1575 case Primitive::kPrimLong: {
1576 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
1577 break;
1578 }
1579 case Primitive::kPrimFloat: {
1580 GenerateFPJumps(condition, true_target, false_target);
1581 break;
1582 }
1583 case Primitive::kPrimDouble: {
Mark Mendellc4701932015-04-10 13:18:51 -04001584 GenerateFPJumps(condition, true_target, false_target);
1585 break;
1586 }
1587 default:
1588 LOG(FATAL) << "Unexpected condition type " << type;
1589 }
1590
David Brazdil0debae72015-11-12 18:37:00 +00001591 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001592 __ jmp(false_target);
1593 }
David Brazdil0debae72015-11-12 18:37:00 +00001594
1595 if (fallthrough_target.IsLinked()) {
1596 __ Bind(&fallthrough_target);
1597 }
Mark Mendellc4701932015-04-10 13:18:51 -04001598}
1599
David Brazdil0debae72015-11-12 18:37:00 +00001600static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1601 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
1602 // are set only strictly before `branch`. We can't use the eflags on long
1603 // conditions if they are materialized due to the complex branching.
1604 return cond->IsCondition() &&
1605 cond->GetNext() == branch &&
1606 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType());
1607}
1608
// Emits the branch(es) for `instruction` whose condition is its input at
// `condition_input_index`. A nullptr target means that successor is the
// fallthrough block and needs no jump. Handles constant conditions, reusable
// eflags, materialized booleans, and non-materialized conditions.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
                                                           size_t condition_input_index,
                                                           LabelType* true_target,
                                                           LabelType* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ jmp(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ jmp(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    if (AreEflagsSetFrom(cond, instruction)) {
      // The eflags set by the condition are still valid; branch on them directly.
      if (true_target == nullptr) {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
      } else {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
      }
    } else {
      // Materialized condition, compare against 0.
      Location lhs = instruction->GetLocations()->InAt(condition_input_index);
      if (lhs.IsRegister()) {
        __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
      } else {
        __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
      }
      if (true_target == nullptr) {
        __ j(kEqual, false_target);
      } else {
        __ j(kNotEqual, true_target);
      }
    }
  } else {
    // Condition has not been materialized, use its inputs as the
    // comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    // If this is a long or FP comparison that has been folded into
    // the HCondition, generate the comparison directly.
    Primitive::Type type = condition->InputAt(0)->GetType();
    if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
      GenerateCompareTestAndBranch(condition, true_target, false_target);
      return;
    }

    Location lhs = condition->GetLocations()->InAt(0);
    Location rhs = condition->GetLocations()->InAt(1);
    codegen_->GenerateIntCompare(lhs, rhs);
    if (true_target == nullptr) {
      __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
    } else {
      __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ jmp(false_target);
  }
}
1692
// HIf needs an input location only when its condition is a boolean value or a
// materialized condition; folded conditions carry their own locations.
void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    locations->SetInAt(0, Location::Any());
  }
}
1699
1700void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001701 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1702 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1703 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1704 nullptr : codegen_->GetLabelOf(true_successor);
1705 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1706 nullptr : codegen_->GetLabelOf(false_successor);
1707 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001708}
1709
// Allocates locations for an HDeoptimize, which branches to a slow path that
// transfers execution back to the interpreter when its condition holds.
void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  // As with HIf, only a boolean value or a materialized condition consumes
  // an input location.
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::Any());
  }
}

void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
  // Jump to the deoptimization slow path if the condition is true; a null
  // false target means execution simply falls through otherwise.
  GenerateTestAndBranch<Label>(deoptimize,
                               /* condition_input_index */ 0,
                               slow_path->GetEntryLabel(),
                               /* false_target */ nullptr);
}
1726
// HShouldDeoptimizeFlag reads a per-frame flag; it only needs an output
// register and makes no calls.
void LocationsBuilderX86_64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(flag, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorX86_64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  // Load the "should deoptimize" flag from its slot in the current frame,
  // addressed at a fixed offset from RSP.
  __ movl(flag->GetLocations()->Out().AsRegister<CpuRegister>(),
          Address(CpuRegister(RSP), codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
}
1737
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001738static bool SelectCanUseCMOV(HSelect* select) {
1739 // There are no conditional move instructions for XMMs.
1740 if (Primitive::IsFloatingPointType(select->GetType())) {
1741 return false;
1742 }
1743
1744 // A FP condition doesn't generate the single CC that we need.
1745 HInstruction* condition = select->GetCondition();
1746 if (condition->IsCondition() &&
1747 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType())) {
1748 return false;
1749 }
1750
1751 // We can generate a CMOV for this Select.
1752 return true;
1753}
1754
David Brazdil74eb1b22015-12-14 11:44:01 +00001755void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
1756 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
1757 if (Primitive::IsFloatingPointType(select->GetType())) {
1758 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001759 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001760 } else {
1761 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001762 if (SelectCanUseCMOV(select)) {
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001763 if (select->InputAt(1)->IsConstant()) {
1764 locations->SetInAt(1, Location::RequiresRegister());
1765 } else {
1766 locations->SetInAt(1, Location::Any());
1767 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001768 } else {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001769 locations->SetInAt(1, Location::Any());
1770 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001771 }
1772 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1773 locations->SetInAt(2, Location::RequiresRegister());
1774 }
1775 locations->SetOut(Location::SameAsFirstInput());
1776}
1777
// Emits code for an HSelect. When possible this uses a CMOV over the output
// (which already holds the "false" value, see SameAsFirstInput above);
// otherwise it falls back to a test-and-branch around a move.
void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  if (SelectCanUseCMOV(select)) {
    // If both the condition and the source types are integer, we can generate
    // a CMOV to implement Select.
    CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
    Location value_true_loc = locations->InAt(1);
    DCHECK(locations->InAt(0).Equals(locations->Out()));

    HInstruction* select_condition = select->GetCondition();
    // Default condition: the boolean-register test below sets ZF, and the
    // value is "true" when the register is non-zero.
    Condition cond = kNotEqual;

    // Figure out how to test the 'condition'.
    if (select_condition->IsCondition()) {
      HCondition* condition = select_condition->AsCondition();
      if (!condition->IsEmittedAtUseSite()) {
        // This was a previously materialized condition.
        // Can we use the existing condition code?
        if (AreEflagsSetFrom(condition, select)) {
          // Materialization was the previous instruction. Condition codes are right.
          cond = X86_64IntegerCondition(condition->GetCondition());
        } else {
          // No, we have to recreate the condition code.
          CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
          __ testl(cond_reg, cond_reg);
        }
      } else {
        // Condition is emitted here: redo the compare right before the CMOV.
        GenerateCompareTest(condition);
        cond = X86_64IntegerCondition(condition->GetCondition());
      }
    } else {
      // Must be a Boolean condition, which needs to be compared to 0.
      CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
      __ testl(cond_reg, cond_reg);
    }

    // If the condition is true, overwrite the output, which already contains false.
    // Generate the correct sized CMOV.
    bool is_64_bit = Primitive::Is64BitType(select->GetType());
    if (value_true_loc.IsRegister()) {
      __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
    } else {
      // The "true" value lives on the stack; use the memory-operand CMOV.
      __ cmov(cond,
              value_false,
              Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
    }
  } else {
    // Fallback: skip the move of the "true" value when the condition is false.
    NearLabel false_target;
    GenerateTestAndBranch<NearLabel>(select,
                                     /* condition_input_index */ 2,
                                     /* true_target */ nullptr,
                                     &false_target);
    codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
    __ Bind(&false_target);
  }
}
1834
// HNativeDebugInfo marks a point where native debug info (PC mapping) is
// recorded; it needs no operands and emits no code of its own.
void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  new (GetGraph()->GetArena()) LocationSummary(info);
}

void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}

// Emits a single-byte no-op, used as a debuggable landing pad.
void CodeGeneratorX86_64::GenerateNop() {
  __ nop();
}
1846
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001847void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001848 LocationSummary* locations =
Roland Levillain0d37cd02015-05-27 16:39:19 +01001849 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001850 // Handle the long/FP comparisons made in instruction simplification.
1851 switch (cond->InputAt(0)->GetType()) {
1852 case Primitive::kPrimLong:
1853 locations->SetInAt(0, Location::RequiresRegister());
1854 locations->SetInAt(1, Location::Any());
1855 break;
1856 case Primitive::kPrimFloat:
1857 case Primitive::kPrimDouble:
1858 locations->SetInAt(0, Location::RequiresFpuRegister());
1859 locations->SetInAt(1, Location::Any());
1860 break;
1861 default:
1862 locations->SetInAt(0, Location::RequiresRegister());
1863 locations->SetInAt(1, Location::Any());
1864 break;
1865 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001866 if (!cond->IsEmittedAtUseSite()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001867 locations->SetOut(Location::RequiresRegister());
1868 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001869}
1870
// Materializes an HCondition into its output register as 0 or 1.
// Integral conditions use cmp + setcc; FP conditions compare with ucomis{s,d}
// and convert the resulting flag combination to a value via GenerateFPJumps.
void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
  if (cond->IsEmittedAtUseSite()) {
    // Nothing to do: the comparison is generated inline at the use.
    return;
  }

  LocationSummary* locations = cond->GetLocations();
  Location lhs = locations->InAt(0);
  Location rhs = locations->InAt(1);
  CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
  NearLabel true_label, false_label;

  switch (cond->InputAt(0)->GetType()) {
    default:
      // Integer case.

      // Clear output register: setcc only sets the low byte.
      __ xorl(reg, reg);

      codegen_->GenerateIntCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case Primitive::kPrimLong:
      // Clear output register: setcc only sets the low byte.
      __ xorl(reg, reg);

      codegen_->GenerateLongCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case Primitive::kPrimFloat: {
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      // The RHS may be a constant (loaded from the literal pool), a stack
      // slot or another XMM register.
      if (rhs.IsConstant()) {
        float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
      } else if (rhs.IsStackSlot()) {
        __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
    case Primitive::kPrimDouble: {
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
      } else if (rhs.IsDoubleStackSlot()) {
        __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
  }

  // Convert the jumps into the result.
  NearLabel done_label;

  // False case: result = 0.
  __ Bind(&false_label);
  __ xorl(reg, reg);
  __ jmp(&done_label);

  // True case: result = 1.
  __ Bind(&true_label);
  __ movl(reg, Immediate(1));
  __ Bind(&done_label);
}
1940
// All comparison visitors delegate to HandleCondition, which allocates the
// operand/output locations and materializes the condition; the specific
// x86-64 condition code is derived later from cond->GetCondition().
// (Below/BelowOrEqual/Above/AboveOrEqual are the unsigned comparisons.)
void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
2020
// Allocates locations for HCompare (the three-way -1/0/1 comparison).
// The first input picks its register class from the compared type; the
// result is always an integer in a GP register.
void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
  switch (compare->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      // The output is only written after both inputs have been read.
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      // Reference and void compares are not valid HCompare inputs.
      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
  }
}
2047
// Emits HCompare: sets the output to -1, 0 or 1 according to the comparison.
// FP compares use ucomis{s,d} and route the unordered (NaN) case to the
// branch selected by the compare's gt-bias.
void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  NearLabel less, greater, done;
  Primitive::Type type = compare->InputAt(0)->GetType();
  // Condition used for the "less" branch after the compare; signed kLess for
  // integers, kBelow for FP (see below).
  Condition less_cond = kLess;

  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt: {
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case Primitive::kPrimLong: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case Primitive::kPrimFloat: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        float value = right.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
      } else if (right.IsStackSlot()) {
        __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      // NaN operands: result depends on the compare's bias.
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  //  ucomis{s,d} sets CF (kBelow)
      break;
    }
    case Primitive::kPrimDouble: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        double value = right.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
      } else if (right.IsDoubleStackSlot()) {
        __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  //  ucomis{s,d} sets CF (kBelow)
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }

  // Dispatch on the flags set above: equal -> 0, less -> -1, else fall
  // through to greater -> 1.
  __ movl(out, Immediate(0));
  __ j(kEqual, &done);
  __ j(less_cond, &less);

  __ Bind(&greater);
  __ movl(out, Immediate(1));
  __ jmp(&done);

  __ Bind(&less);
  __ movl(out, Immediate(-1));

  __ Bind(&done);
}
2116
// Constant visitors: constants are given a ConstantLocation and emit no code
// here; each use materializes the value itself (e.g. as an immediate or a
// literal-pool load).
void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2167
// HMemoryBarrier needs no operand locations; the kind of barrier to emit is
// carried on the instruction itself.
void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
2175
// A void return has no operands; code generation is just the frame teardown.
void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}
2183
// Pins the returned value to its ABI return register: RAX for integral and
// reference values, XMM0 for floating point.
void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  switch (ret->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
      break;

    default:
      LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
  }
}
2207
// Emits the return: the value is already in its ABI register (enforced by
// the locations above), so only the frame teardown is generated. The switch
// merely re-checks the register assignment in debug builds.
void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimLong:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
        break;

      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
                  XMM0);
        break;

      default:
        LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
}
2233
// Maps a return type to its location under the dex calling convention:
// RAX for integral/reference values, XMM0 for FP, nothing for void.
Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      return Location::RegisterLocation(RAX);

    case Primitive::kPrimVoid:
      return Location::NoLocation();

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      return Location::FpuRegisterLocation(XMM0);
  }

  // The switch above is exhaustive over Primitive::Type.
  UNREACHABLE();
}

// The ArtMethod* callee is passed in a fixed register.
Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}
2259
// Returns the location of the next argument of the given type and advances
// the visitor's running counters: gp_index_ for GP registers, float_index_
// for FPU registers, and stack_index_ (in 32-bit slots) for the stack
// position used once the registers run out.
Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        // stack_index_ was already advanced past this argument's slot.
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      // A long occupies two 32-bit stack slots but only one GP register.
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfRegisters()) {
        gp_index_ += 1;
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        gp_index_ += 2;
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t index = float_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimDouble: {
      uint32_t index = float_index_++;
      // A double occupies two 32-bit stack slots.
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location::NoLocation();
}
2315
void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // Resolution failed at compile time; emit a call into the runtime instead.
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
2326
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002327void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002328 // Explicit clinit checks triggered by static invokes must have been pruned by
2329 // art::PrepareForRegisterAllocation.
2330 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002331
Mark Mendellfb8d2792015-03-31 22:16:59 -04002332 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002333 if (intrinsic.TryDispatch(invoke)) {
2334 return;
2335 }
2336
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002337 HandleInvoke(invoke);
2338}
2339
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002340static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2341 if (invoke->GetLocations()->Intrinsified()) {
2342 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2343 intrinsic.Dispatch(invoke);
2344 return true;
2345 }
2346 return false;
2347}
2348
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002349void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002350 // Explicit clinit checks triggered by static invokes must have been pruned by
2351 // art::PrepareForRegisterAllocation.
2352 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002353
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002354 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2355 return;
2356 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002357
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002358 LocationSummary* locations = invoke->GetLocations();
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002359 codegen_->GenerateStaticOrDirectCall(
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002360 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00002361 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002362}
2363
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002364void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002365 InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002366 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002367}
2368
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002369void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Mark Mendellfb8d2792015-03-31 22:16:59 -04002370 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002371 if (intrinsic.TryDispatch(invoke)) {
2372 return;
2373 }
2374
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002375 HandleInvoke(invoke);
2376}
2377
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002378void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002379 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2380 return;
2381 }
2382
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002383 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002384 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray39468442014-09-02 15:17:15 +01002385 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002386}
2387
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002388void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2389 HandleInvoke(invoke);
2390 // Add the hidden argument.
2391 invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
2392}
2393
2394void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2395 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain0d5a2812015-11-13 10:07:31 +00002396 LocationSummary* locations = invoke->GetLocations();
2397 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
2398 CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002399 Location receiver = locations->InAt(0);
2400 size_t class_offset = mirror::Object::ClassOffset().SizeValue();
2401
Roland Levillain0d5a2812015-11-13 10:07:31 +00002402 // Set the hidden argument. This is safe to do this here, as RAX
2403 // won't be modified thereafter, before the `call` instruction.
2404 DCHECK_EQ(RAX, hidden_reg.AsRegister());
Mark Mendell92e83bf2015-05-07 11:25:03 -04002405 codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002406
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002407 if (receiver.IsStackSlot()) {
2408 __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00002409 // /* HeapReference<Class> */ temp = temp->klass_
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002410 __ movl(temp, Address(temp, class_offset));
2411 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00002412 // /* HeapReference<Class> */ temp = receiver->klass_
Roland Levillain271ab9c2014-11-27 15:23:57 +00002413 __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002414 }
Calin Juravle77520bc2015-01-12 18:45:46 +00002415 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +00002416 // Instead of simply (possibly) unpoisoning `temp` here, we should
2417 // emit a read barrier for the previous class reference load.
2418 // However this is not required in practice, as this is an
2419 // intermediate/temporary reference and because the current
2420 // concurrent copying collector keeps the from-space memory
2421 // intact/accessible until the end of the marking phase (the
2422 // concurrent copying collector may not in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01002423 __ MaybeUnpoisonHeapReference(temp);
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00002424 // temp = temp->GetAddressOfIMT()
2425 __ movq(temp,
2426 Address(temp, mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
2427 // temp = temp->GetImtEntryAt(method_offset);
2428 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00002429 invoke->GetImtIndex(), kX86_64PointerSize));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002430 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07002431 __ movq(temp, Address(temp, method_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002432 // call temp->GetEntryPoint();
Andreas Gampe542451c2016-07-26 09:02:02 -07002433 __ call(Address(
2434 temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize).SizeValue()));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002435
2436 DCHECK(!codegen_->IsLeafMethod());
2437 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2438}
2439
Orion Hodsonac141392017-01-13 11:53:47 +00002440void LocationsBuilderX86_64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
2441 HandleInvoke(invoke);
2442}
2443
2444void InstructionCodeGeneratorX86_64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
2445 codegen_->GenerateInvokePolymorphicCall(invoke);
2446}
2447
Roland Levillain88cb1752014-10-20 16:36:47 +01002448void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
2449 LocationSummary* locations =
2450 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2451 switch (neg->GetResultType()) {
2452 case Primitive::kPrimInt:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002453 case Primitive::kPrimLong:
Roland Levillain88cb1752014-10-20 16:36:47 +01002454 locations->SetInAt(0, Location::RequiresRegister());
2455 locations->SetOut(Location::SameAsFirstInput());
2456 break;
2457
Roland Levillain88cb1752014-10-20 16:36:47 +01002458 case Primitive::kPrimFloat:
2459 case Primitive::kPrimDouble:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002460 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002461 locations->SetOut(Location::SameAsFirstInput());
Roland Levillain5368c212014-11-27 15:03:41 +00002462 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002463 break;
2464
2465 default:
2466 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2467 }
2468}
2469
2470void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
2471 LocationSummary* locations = neg->GetLocations();
2472 Location out = locations->Out();
2473 Location in = locations->InAt(0);
2474 switch (neg->GetResultType()) {
2475 case Primitive::kPrimInt:
2476 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002477 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002478 __ negl(out.AsRegister<CpuRegister>());
Roland Levillain88cb1752014-10-20 16:36:47 +01002479 break;
2480
2481 case Primitive::kPrimLong:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002482 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002483 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002484 __ negq(out.AsRegister<CpuRegister>());
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002485 break;
2486
Roland Levillain5368c212014-11-27 15:03:41 +00002487 case Primitive::kPrimFloat: {
2488 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002489 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002490 // Implement float negation with an exclusive or with value
2491 // 0x80000000 (mask for bit 31, representing the sign of a
2492 // single-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002493 __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002494 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain3dbcb382014-10-28 17:30:07 +00002495 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002496 }
Roland Levillain3dbcb382014-10-28 17:30:07 +00002497
Roland Levillain5368c212014-11-27 15:03:41 +00002498 case Primitive::kPrimDouble: {
2499 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002500 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002501 // Implement double negation with an exclusive or with value
Roland Levillain3dbcb382014-10-28 17:30:07 +00002502 // 0x8000000000000000 (mask for bit 63, representing the sign of
Roland Levillain5368c212014-11-27 15:03:41 +00002503 // a double-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002504 __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002505 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain88cb1752014-10-20 16:36:47 +01002506 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002507 }
Roland Levillain88cb1752014-10-20 16:36:47 +01002508
2509 default:
2510 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2511 }
2512}
2513
Roland Levillaindff1f282014-11-05 14:15:05 +00002514void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2515 LocationSummary* locations =
2516 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
2517 Primitive::Type result_type = conversion->GetResultType();
2518 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002519 DCHECK_NE(result_type, input_type);
David Brazdil46e2a392015-03-16 17:31:52 +00002520
David Brazdilb2bd1c52015-03-25 11:17:37 +00002521 // The Java language does not allow treating boolean as an integral type but
2522 // our bit representation makes it safe.
David Brazdil46e2a392015-03-16 17:31:52 +00002523
Roland Levillaindff1f282014-11-05 14:15:05 +00002524 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002525 case Primitive::kPrimByte:
2526 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002527 case Primitive::kPrimLong:
2528 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002529 case Primitive::kPrimBoolean:
2530 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002531 case Primitive::kPrimShort:
2532 case Primitive::kPrimInt:
2533 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002534 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002535 locations->SetInAt(0, Location::Any());
2536 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2537 break;
2538
2539 default:
2540 LOG(FATAL) << "Unexpected type conversion from " << input_type
2541 << " to " << result_type;
2542 }
2543 break;
2544
Roland Levillain01a8d712014-11-14 16:27:39 +00002545 case Primitive::kPrimShort:
2546 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002547 case Primitive::kPrimLong:
2548 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002549 case Primitive::kPrimBoolean:
2550 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002551 case Primitive::kPrimByte:
2552 case Primitive::kPrimInt:
2553 case Primitive::kPrimChar:
2554 // Processing a Dex `int-to-short' instruction.
2555 locations->SetInAt(0, Location::Any());
2556 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2557 break;
2558
2559 default:
2560 LOG(FATAL) << "Unexpected type conversion from " << input_type
2561 << " to " << result_type;
2562 }
2563 break;
2564
Roland Levillain946e1432014-11-11 17:35:19 +00002565 case Primitive::kPrimInt:
2566 switch (input_type) {
2567 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002568 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002569 locations->SetInAt(0, Location::Any());
2570 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2571 break;
2572
2573 case Primitive::kPrimFloat:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002574 // Processing a Dex `float-to-int' instruction.
2575 locations->SetInAt(0, Location::RequiresFpuRegister());
2576 locations->SetOut(Location::RequiresRegister());
Roland Levillain3f8f9362014-12-02 17:45:01 +00002577 break;
2578
Roland Levillain946e1432014-11-11 17:35:19 +00002579 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002580 // Processing a Dex `double-to-int' instruction.
2581 locations->SetInAt(0, Location::RequiresFpuRegister());
2582 locations->SetOut(Location::RequiresRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002583 break;
2584
2585 default:
2586 LOG(FATAL) << "Unexpected type conversion from " << input_type
2587 << " to " << result_type;
2588 }
2589 break;
2590
Roland Levillaindff1f282014-11-05 14:15:05 +00002591 case Primitive::kPrimLong:
2592 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002593 case Primitive::kPrimBoolean:
2594 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002595 case Primitive::kPrimByte:
2596 case Primitive::kPrimShort:
2597 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002598 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002599 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002600 // TODO: We would benefit from a (to-be-implemented)
2601 // Location::RegisterOrStackSlot requirement for this input.
2602 locations->SetInAt(0, Location::RequiresRegister());
2603 locations->SetOut(Location::RequiresRegister());
2604 break;
2605
2606 case Primitive::kPrimFloat:
Roland Levillain624279f2014-12-04 11:54:28 +00002607 // Processing a Dex `float-to-long' instruction.
2608 locations->SetInAt(0, Location::RequiresFpuRegister());
2609 locations->SetOut(Location::RequiresRegister());
Roland Levillain624279f2014-12-04 11:54:28 +00002610 break;
2611
Roland Levillaindff1f282014-11-05 14:15:05 +00002612 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002613 // Processing a Dex `double-to-long' instruction.
2614 locations->SetInAt(0, Location::RequiresFpuRegister());
2615 locations->SetOut(Location::RequiresRegister());
Roland Levillaindff1f282014-11-05 14:15:05 +00002616 break;
2617
2618 default:
2619 LOG(FATAL) << "Unexpected type conversion from " << input_type
2620 << " to " << result_type;
2621 }
2622 break;
2623
Roland Levillain981e4542014-11-14 11:47:14 +00002624 case Primitive::kPrimChar:
2625 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002626 case Primitive::kPrimLong:
2627 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002628 case Primitive::kPrimBoolean:
2629 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002630 case Primitive::kPrimByte:
2631 case Primitive::kPrimShort:
2632 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002633 // Processing a Dex `int-to-char' instruction.
2634 locations->SetInAt(0, Location::Any());
2635 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2636 break;
2637
2638 default:
2639 LOG(FATAL) << "Unexpected type conversion from " << input_type
2640 << " to " << result_type;
2641 }
2642 break;
2643
Roland Levillaindff1f282014-11-05 14:15:05 +00002644 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002645 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002646 case Primitive::kPrimBoolean:
2647 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002648 case Primitive::kPrimByte:
2649 case Primitive::kPrimShort:
2650 case Primitive::kPrimInt:
2651 case Primitive::kPrimChar:
2652 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002653 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002654 locations->SetOut(Location::RequiresFpuRegister());
2655 break;
2656
2657 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002658 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002659 locations->SetInAt(0, Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002660 locations->SetOut(Location::RequiresFpuRegister());
2661 break;
2662
Roland Levillaincff13742014-11-17 14:32:17 +00002663 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002664 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002665 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002666 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002667 break;
2668
2669 default:
2670 LOG(FATAL) << "Unexpected type conversion from " << input_type
2671 << " to " << result_type;
2672 };
2673 break;
2674
Roland Levillaindff1f282014-11-05 14:15:05 +00002675 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002676 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002677 case Primitive::kPrimBoolean:
2678 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002679 case Primitive::kPrimByte:
2680 case Primitive::kPrimShort:
2681 case Primitive::kPrimInt:
2682 case Primitive::kPrimChar:
2683 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002684 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002685 locations->SetOut(Location::RequiresFpuRegister());
2686 break;
2687
2688 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002689 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002690 locations->SetInAt(0, Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002691 locations->SetOut(Location::RequiresFpuRegister());
2692 break;
2693
Roland Levillaincff13742014-11-17 14:32:17 +00002694 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002695 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002696 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002697 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002698 break;
2699
2700 default:
2701 LOG(FATAL) << "Unexpected type conversion from " << input_type
2702 << " to " << result_type;
2703 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002704 break;
2705
2706 default:
2707 LOG(FATAL) << "Unexpected type conversion from " << input_type
2708 << " to " << result_type;
2709 }
2710}
2711
2712void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2713 LocationSummary* locations = conversion->GetLocations();
2714 Location out = locations->Out();
2715 Location in = locations->InAt(0);
2716 Primitive::Type result_type = conversion->GetResultType();
2717 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002718 DCHECK_NE(result_type, input_type);
Roland Levillaindff1f282014-11-05 14:15:05 +00002719 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002720 case Primitive::kPrimByte:
2721 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002722 case Primitive::kPrimLong:
2723 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002724 case Primitive::kPrimBoolean:
2725 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002726 case Primitive::kPrimShort:
2727 case Primitive::kPrimInt:
2728 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002729 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002730 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002731 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002732 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002733 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002734 Address(CpuRegister(RSP), in.GetStackIndex()));
2735 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002736 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002737 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain51d3fc42014-11-13 14:11:42 +00002738 }
2739 break;
2740
2741 default:
2742 LOG(FATAL) << "Unexpected type conversion from " << input_type
2743 << " to " << result_type;
2744 }
2745 break;
2746
Roland Levillain01a8d712014-11-14 16:27:39 +00002747 case Primitive::kPrimShort:
2748 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002749 case Primitive::kPrimLong:
2750 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002751 case Primitive::kPrimBoolean:
2752 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002753 case Primitive::kPrimByte:
2754 case Primitive::kPrimInt:
2755 case Primitive::kPrimChar:
2756 // Processing a Dex `int-to-short' instruction.
2757 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002758 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002759 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002760 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002761 Address(CpuRegister(RSP), in.GetStackIndex()));
2762 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002763 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002764 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain01a8d712014-11-14 16:27:39 +00002765 }
2766 break;
2767
2768 default:
2769 LOG(FATAL) << "Unexpected type conversion from " << input_type
2770 << " to " << result_type;
2771 }
2772 break;
2773
Roland Levillain946e1432014-11-11 17:35:19 +00002774 case Primitive::kPrimInt:
2775 switch (input_type) {
2776 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002777 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002778 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002779 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00002780 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002781 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00002782 Address(CpuRegister(RSP), in.GetStackIndex()));
2783 } else {
2784 DCHECK(in.IsConstant());
2785 DCHECK(in.GetConstant()->IsLongConstant());
2786 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002787 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002788 }
2789 break;
2790
Roland Levillain3f8f9362014-12-02 17:45:01 +00002791 case Primitive::kPrimFloat: {
2792 // Processing a Dex `float-to-int' instruction.
2793 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2794 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002795 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002796
2797 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002798 // if input >= (float)INT_MAX goto done
2799 __ comiss(input, codegen_->LiteralFloatAddress(kPrimIntMax));
Roland Levillain3f8f9362014-12-02 17:45:01 +00002800 __ j(kAboveEqual, &done);
2801 // if input == NaN goto nan
2802 __ j(kUnordered, &nan);
2803 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002804 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00002805 __ jmp(&done);
2806 __ Bind(&nan);
2807 // output = 0
2808 __ xorl(output, output);
2809 __ Bind(&done);
2810 break;
2811 }
2812
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002813 case Primitive::kPrimDouble: {
2814 // Processing a Dex `double-to-int' instruction.
2815 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2816 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002817 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002818
2819 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002820 // if input >= (double)INT_MAX goto done
2821 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002822 __ j(kAboveEqual, &done);
2823 // if input == NaN goto nan
2824 __ j(kUnordered, &nan);
2825 // output = double-to-int-truncate(input)
2826 __ cvttsd2si(output, input);
2827 __ jmp(&done);
2828 __ Bind(&nan);
2829 // output = 0
2830 __ xorl(output, output);
2831 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002832 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002833 }
Roland Levillain946e1432014-11-11 17:35:19 +00002834
2835 default:
2836 LOG(FATAL) << "Unexpected type conversion from " << input_type
2837 << " to " << result_type;
2838 }
2839 break;
2840
Roland Levillaindff1f282014-11-05 14:15:05 +00002841 case Primitive::kPrimLong:
2842 switch (input_type) {
2843 DCHECK(out.IsRegister());
David Brazdil46e2a392015-03-16 17:31:52 +00002844 case Primitive::kPrimBoolean:
2845 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002846 case Primitive::kPrimByte:
2847 case Primitive::kPrimShort:
2848 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002849 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002850 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002851 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002852 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00002853 break;
2854
Roland Levillain624279f2014-12-04 11:54:28 +00002855 case Primitive::kPrimFloat: {
2856 // Processing a Dex `float-to-long' instruction.
2857 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2858 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002859 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00002860
Mark Mendell92e83bf2015-05-07 11:25:03 -04002861 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002862 // if input >= (float)LONG_MAX goto done
2863 __ comiss(input, codegen_->LiteralFloatAddress(kPrimLongMax));
Roland Levillain624279f2014-12-04 11:54:28 +00002864 __ j(kAboveEqual, &done);
2865 // if input == NaN goto nan
2866 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002867 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002868 __ cvttss2si(output, input, true);
2869 __ jmp(&done);
2870 __ Bind(&nan);
2871 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002872 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00002873 __ Bind(&done);
2874 break;
2875 }
2876
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002877 case Primitive::kPrimDouble: {
2878 // Processing a Dex `double-to-long' instruction.
2879 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2880 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002881 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002882
Mark Mendell92e83bf2015-05-07 11:25:03 -04002883 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002884 // if input >= (double)LONG_MAX goto done
2885 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimLongMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002886 __ j(kAboveEqual, &done);
2887 // if input == NaN goto nan
2888 __ j(kUnordered, &nan);
2889 // output = double-to-long-truncate(input)
2890 __ cvttsd2si(output, input, true);
2891 __ jmp(&done);
2892 __ Bind(&nan);
2893 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002894 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002895 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00002896 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002897 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002898
2899 default:
2900 LOG(FATAL) << "Unexpected type conversion from " << input_type
2901 << " to " << result_type;
2902 }
2903 break;
2904
Roland Levillain981e4542014-11-14 11:47:14 +00002905 case Primitive::kPrimChar:
2906 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002907 case Primitive::kPrimLong:
2908 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002909 case Primitive::kPrimBoolean:
2910 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002911 case Primitive::kPrimByte:
2912 case Primitive::kPrimShort:
2913 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002914 // Processing a Dex `int-to-char' instruction.
2915 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002916 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002917 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002918 __ movzxw(out.AsRegister<CpuRegister>(),
Roland Levillain981e4542014-11-14 11:47:14 +00002919 Address(CpuRegister(RSP), in.GetStackIndex()));
2920 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002921 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002922 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain981e4542014-11-14 11:47:14 +00002923 }
2924 break;
2925
2926 default:
2927 LOG(FATAL) << "Unexpected type conversion from " << input_type
2928 << " to " << result_type;
2929 }
2930 break;
2931
Roland Levillaindff1f282014-11-05 14:15:05 +00002932 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002933 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002934 case Primitive::kPrimBoolean:
2935 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002936 case Primitive::kPrimByte:
2937 case Primitive::kPrimShort:
2938 case Primitive::kPrimInt:
2939 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002940 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002941 if (in.IsRegister()) {
2942 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2943 } else if (in.IsConstant()) {
2944 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2945 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002946 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002947 } else {
2948 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2949 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2950 }
Roland Levillaincff13742014-11-17 14:32:17 +00002951 break;
2952
2953 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002954 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002955 if (in.IsRegister()) {
2956 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2957 } else if (in.IsConstant()) {
2958 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2959 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Pavel Vyssotski4c858cd2016-03-16 13:59:53 +06002960 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002961 } else {
2962 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2963 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2964 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002965 break;
2966
Roland Levillaincff13742014-11-17 14:32:17 +00002967 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002968 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002969 if (in.IsFpuRegister()) {
2970 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2971 } else if (in.IsConstant()) {
2972 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
2973 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002974 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002975 } else {
2976 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
2977 Address(CpuRegister(RSP), in.GetStackIndex()));
2978 }
Roland Levillaincff13742014-11-17 14:32:17 +00002979 break;
2980
2981 default:
2982 LOG(FATAL) << "Unexpected type conversion from " << input_type
2983 << " to " << result_type;
2984 };
2985 break;
2986
Roland Levillaindff1f282014-11-05 14:15:05 +00002987 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002988 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002989 case Primitive::kPrimBoolean:
2990 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002991 case Primitive::kPrimByte:
2992 case Primitive::kPrimShort:
2993 case Primitive::kPrimInt:
2994 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002995 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002996 if (in.IsRegister()) {
2997 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2998 } else if (in.IsConstant()) {
2999 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
3000 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003001 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003002 } else {
3003 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
3004 Address(CpuRegister(RSP), in.GetStackIndex()), false);
3005 }
Roland Levillaincff13742014-11-17 14:32:17 +00003006 break;
3007
3008 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00003009 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04003010 if (in.IsRegister()) {
3011 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
3012 } else if (in.IsConstant()) {
3013 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
3014 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003015 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003016 } else {
3017 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
3018 Address(CpuRegister(RSP), in.GetStackIndex()), true);
3019 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00003020 break;
3021
Roland Levillaincff13742014-11-17 14:32:17 +00003022 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00003023 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04003024 if (in.IsFpuRegister()) {
3025 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
3026 } else if (in.IsConstant()) {
3027 float v = in.GetConstant()->AsFloatConstant()->GetValue();
3028 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003029 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003030 } else {
3031 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
3032 Address(CpuRegister(RSP), in.GetStackIndex()));
3033 }
Roland Levillaincff13742014-11-17 14:32:17 +00003034 break;
3035
3036 default:
3037 LOG(FATAL) << "Unexpected type conversion from " << input_type
3038 << " to " << result_type;
3039 };
Roland Levillaindff1f282014-11-05 14:15:05 +00003040 break;
3041
3042 default:
3043 LOG(FATAL) << "Unexpected type conversion from " << input_type
3044 << " to " << result_type;
3045 }
3046}
3047
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003048void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003049 LocationSummary* locations =
3050 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003051 switch (add->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003052 case Primitive::kPrimInt: {
3053 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003054 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
3055 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003056 break;
3057 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003058
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003059 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003060 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05003061 // We can use a leaq or addq if the constant can fit in an immediate.
Mark Mendellea5af682015-10-22 17:35:49 -04003062 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05003063 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003064 break;
3065 }
3066
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003067 case Primitive::kPrimDouble:
3068 case Primitive::kPrimFloat: {
3069 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003070 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003071 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003072 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003073 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003074
3075 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003076 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003077 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003078}
3079
// Emits code for an HAdd. Integer adds use leal/leaq as a non-destructive
// three-operand add whenever the output register differs from both inputs;
// FP adds are two-operand, so the output aliases the first input (enforced
// by the locations builder).
void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          // out aliases first: plain two-operand add.
          __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          // out aliases second: add is commutative, add first into it.
          __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          // Distinct output register: use leal as a three-operand add.
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else if (second.IsConstant()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addl(out.AsRegister<CpuRegister>(),
                  Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          // leal with displacement: out = first + constant, first unchanged.
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
        }
      } else {
        // Stack-slot operand: add can only be done in place.
        DCHECK(first.Equals(locations->Out()));
        __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      if (second.IsRegister()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          // Three-operand add via leaq.
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else {
        // The locations builder only allows int32-range constants here
        // (RegisterOrInt32Constant), so the value fits in an immediate.
        DCHECK(second.IsConstant());
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        int32_t int32_value = Low32Bits(value);
        DCHECK_EQ(int32_value, value);
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), int32_value));
        }
      }
      break;
    }

    case Primitive::kPrimFloat: {
      // Two-operand addss: first (== out) accumulates the result.
      if (second.IsFpuRegister()) {
        __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand is materialized in the constant area.
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      // Same as the float case, with the double-width forms.
      if (second.IsFpuRegister()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
3171
3172void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003173 LocationSummary* locations =
3174 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003175 switch (sub->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003176 case Primitive::kPrimInt: {
3177 locations->SetInAt(0, Location::RequiresRegister());
3178 locations->SetInAt(1, Location::Any());
3179 locations->SetOut(Location::SameAsFirstInput());
3180 break;
3181 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003182 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003183 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04003184 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003185 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003186 break;
3187 }
Calin Juravle11351682014-10-23 15:38:15 +01003188 case Primitive::kPrimFloat:
3189 case Primitive::kPrimDouble: {
3190 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003191 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003192 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003193 break;
Calin Juravle11351682014-10-23 15:38:15 +01003194 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003195 default:
Calin Juravle11351682014-10-23 15:38:15 +01003196 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003197 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003198}
3199
3200void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
3201 LocationSummary* locations = sub->GetLocations();
Calin Juravle11351682014-10-23 15:38:15 +01003202 Location first = locations->InAt(0);
3203 Location second = locations->InAt(1);
3204 DCHECK(first.Equals(locations->Out()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003205 switch (sub->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003206 case Primitive::kPrimInt: {
Calin Juravle11351682014-10-23 15:38:15 +01003207 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003208 __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle11351682014-10-23 15:38:15 +01003209 } else if (second.IsConstant()) {
3210 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
Roland Levillain271ab9c2014-11-27 15:23:57 +00003211 __ subl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003212 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003213 __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003214 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003215 break;
3216 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003217 case Primitive::kPrimLong: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003218 if (second.IsConstant()) {
3219 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3220 DCHECK(IsInt<32>(value));
3221 __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
3222 } else {
3223 __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
3224 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003225 break;
3226 }
3227
Calin Juravle11351682014-10-23 15:38:15 +01003228 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003229 if (second.IsFpuRegister()) {
3230 __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3231 } else if (second.IsConstant()) {
3232 __ subss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003233 codegen_->LiteralFloatAddress(
3234 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003235 } else {
3236 DCHECK(second.IsStackSlot());
3237 __ subss(first.AsFpuRegister<XmmRegister>(),
3238 Address(CpuRegister(RSP), second.GetStackIndex()));
3239 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003240 break;
Calin Juravle11351682014-10-23 15:38:15 +01003241 }
3242
3243 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003244 if (second.IsFpuRegister()) {
3245 __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3246 } else if (second.IsConstant()) {
3247 __ subsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003248 codegen_->LiteralDoubleAddress(
3249 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003250 } else {
3251 DCHECK(second.IsDoubleStackSlot());
3252 __ subsd(first.AsFpuRegister<XmmRegister>(),
3253 Address(CpuRegister(RSP), second.GetStackIndex()));
3254 }
Calin Juravle11351682014-10-23 15:38:15 +01003255 break;
3256 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003257
3258 default:
Calin Juravle11351682014-10-23 15:38:15 +01003259 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003260 }
3261}
3262
Calin Juravle34bacdf2014-10-07 20:23:36 +01003263void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3264 LocationSummary* locations =
3265 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3266 switch (mul->GetResultType()) {
3267 case Primitive::kPrimInt: {
3268 locations->SetInAt(0, Location::RequiresRegister());
3269 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003270 if (mul->InputAt(1)->IsIntConstant()) {
3271 // Can use 3 operand multiply.
3272 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3273 } else {
3274 locations->SetOut(Location::SameAsFirstInput());
3275 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003276 break;
3277 }
3278 case Primitive::kPrimLong: {
3279 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003280 locations->SetInAt(1, Location::Any());
3281 if (mul->InputAt(1)->IsLongConstant() &&
3282 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003283 // Can use 3 operand multiply.
3284 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3285 } else {
3286 locations->SetOut(Location::SameAsFirstInput());
3287 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003288 break;
3289 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003290 case Primitive::kPrimFloat:
3291 case Primitive::kPrimDouble: {
3292 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003293 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003294 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003295 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003296 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003297
3298 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003299 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003300 }
3301}
3302
// Emits code for an HMul. Constant right-hand sides use the three-operand
// imul forms (so the output need not alias the first input); all other
// forms are destructive two-operand multiplies, matching the locations
// chosen in LocationsBuilderX86_64::VisitMul.
void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsIntConstant()) {
        // Three-operand form: out = first * imm.
        Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
        __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(first.Equals(out));
        DCHECK(second.IsStackSlot());
        __ imull(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    case Primitive::kPrimLong: {
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsLongConstant()) {
        int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
        if (IsInt<32>(value)) {
          // Constant fits in an imulq immediate: three-operand form.
          __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
                   Immediate(static_cast<int32_t>(value)));
        } else {
          // Have to use the constant area.
          DCHECK(first.Equals(out));
          __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(second.IsDoubleStackSlot());
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimFloat: {
      // Two-operand mulss: first (== out) accumulates the product.
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand is materialized in the constant area.
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      // Same as the float case, with the double-width forms.
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
3386
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003387void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3388 uint32_t stack_adjustment, bool is_float) {
3389 if (source.IsStackSlot()) {
3390 DCHECK(is_float);
3391 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3392 } else if (source.IsDoubleStackSlot()) {
3393 DCHECK(!is_float);
3394 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3395 } else {
3396 // Write the value to the temporary location on the stack and load to FP stack.
3397 if (is_float) {
3398 Location stack_temp = Location::StackSlot(temp_offset);
3399 codegen_->Move(stack_temp, source);
3400 __ flds(Address(CpuRegister(RSP), temp_offset));
3401 } else {
3402 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3403 codegen_->Move(stack_temp, source);
3404 __ fldl(Address(CpuRegister(RSP), temp_offset));
3405 }
3406 }
3407}
3408
// Computes floating-point remainder for HRem using the x87 FPREM
// instruction (SSE has no remainder), since Java fmod semantics match
// FPREM's round-toward-zero partial remainder. FPREM may only reduce the
// argument partially per issue, so it is looped until the FPU reports
// completion via the C2 status flag.
void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
  Primitive::Type type = rem->GetResultType();
  bool is_float = type == Primitive::kPrimFloat;
  size_t elem_size = Primitive::ComponentSize(type);
  LocationSummary* locations = rem->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  // Create stack space for 2 elements.
  // TODO: enhance register allocator to ask for stack temporaries.
  __ subq(CpuRegister(RSP), Immediate(2 * elem_size));

  // Load the values to the FP stack in reverse order, using temporaries if needed.
  // FPREM computes ST(0) % ST(1), so the divisor must be pushed first.
  PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
  PushOntoFPStack(first, 0, 2 * elem_size, is_float);

  // Loop doing FPREM until we stabilize.
  NearLabel retry;
  __ Bind(&retry);
  __ fprem();

  // Move FP status to AX.
  __ fstsw();

  // And see if the argument reduction is complete. This is signaled by the
  // C2 FPU flag bit set to 0.
  __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
  __ j(kNotEqual, &retry);

  // We have settled on the final value. Retrieve it into an XMM register.
  // Store FP top of stack to real stack.
  if (is_float) {
    __ fsts(Address(CpuRegister(RSP), 0));
  } else {
    __ fstl(Address(CpuRegister(RSP), 0));
  }

  // Pop the 2 items from the FP stack.
  // fucompp compares and pops twice, leaving the x87 stack empty.
  __ fucompp();

  // Load the value from the stack into an XMM register.
  DCHECK(out.IsFpuRegister()) << out;
  if (is_float) {
    __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  } else {
    __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  }

  // And remove the temporary stack space we allocated.
  __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
}
3461
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003462void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3463 DCHECK(instruction->IsDiv() || instruction->IsRem());
3464
3465 LocationSummary* locations = instruction->GetLocations();
3466 Location second = locations->InAt(1);
3467 DCHECK(second.IsConstant());
3468
3469 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3470 CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003471 int64_t imm = Int64FromConstant(second.GetConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003472
3473 DCHECK(imm == 1 || imm == -1);
3474
3475 switch (instruction->GetResultType()) {
3476 case Primitive::kPrimInt: {
3477 if (instruction->IsRem()) {
3478 __ xorl(output_register, output_register);
3479 } else {
3480 __ movl(output_register, input_register);
3481 if (imm == -1) {
3482 __ negl(output_register);
3483 }
3484 }
3485 break;
3486 }
3487
3488 case Primitive::kPrimLong: {
3489 if (instruction->IsRem()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003490 __ xorl(output_register, output_register);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003491 } else {
3492 __ movq(output_register, input_register);
3493 if (imm == -1) {
3494 __ negq(output_register);
3495 }
3496 }
3497 break;
3498 }
3499
3500 default:
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003501 LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003502 }
3503}
3504
// Emits a division by a power-of-two constant (positive or negative)
// without idiv. An arithmetic shift alone rounds toward negative infinity,
// but Java division rounds toward zero, so negative numerators are first
// biased by (|divisor| - 1) before shifting; a final negate handles a
// negative divisor.
void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
  CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();

  int64_t imm = Int64FromConstant(second.GetConstant());
  // AbsOrMin also covers the INT64_MIN divisor, whose magnitude is 2^63.
  DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
  uint64_t abs_imm = AbsOrMin(imm);

  CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();

  if (instruction->GetResultType() == Primitive::kPrimInt) {
    // tmp = numerator + (|imm| - 1), the bias needed for negative inputs.
    __ leal(tmp, Address(numerator, abs_imm - 1));
    __ testl(numerator, numerator);
    // Keep the unbiased numerator when it is non-negative.
    __ cmov(kGreaterEqual, tmp, numerator);
    int shift = CTZ(imm);
    __ sarl(tmp, Immediate(shift));

    if (imm < 0) {
      __ negl(tmp);
    }

    __ movl(output_register, tmp);
  } else {
    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
    CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();

    // The bias may not fit in a leaq displacement, so materialize it.
    codegen_->Load64BitValue(rdx, abs_imm - 1);
    __ addq(rdx, numerator);
    __ testq(numerator, numerator);
    __ cmov(kGreaterEqual, rdx, numerator);
    int shift = CTZ(imm);
    __ sarq(rdx, Immediate(shift));

    if (imm < 0) {
      __ negq(rdx);
    }

    __ movq(output_register, rdx);
  }
}
3548
// Generates code for an HDiv/HRem with an arbitrary constant divisor
// (|imm| >= 2, not a power of two) using the classic "magic number"
// multiply-and-shift technique: a high multiply by a precomputed reciprocal
// followed by correction steps replaces the hardware idiv.
// Register convention: the numerator arrives in RAX; the quotient ends in
// RAX (div) and the remainder in RDX (rem), matching the locations set up
// in VisitDiv/VisitRem.
void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  // Temp used to preserve the numerator across the imul that clobbers RAX/RDX.
  CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
                                               : locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
                                         : locations->Out().AsRegister<CpuRegister>();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();

  DCHECK_EQ(RAX, eax.AsRegister());
  DCHECK_EQ(RDX, edx.AsRegister());
  if (instruction->IsDiv()) {
    DCHECK_EQ(RAX, out.AsRegister());
  } else {
    DCHECK_EQ(RDX, out.AsRegister());
  }

  int64_t magic;
  int shift;

  // TODO: can these branches be written as one?
  if (instruction->GetResultType() == Primitive::kPrimInt) {
    int imm = second.GetConstant()->AsIntConstant()->GetValue();

    CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);

    // Save the numerator; the widening imul below overwrites EAX and EDX.
    __ movl(numerator, eax);

    // EDX:EAX = magic * numerator.
    __ movl(eax, Immediate(magic));
    __ imull(numerator);

    // Correction when the signs of imm and magic differ.
    if (imm > 0 && magic < 0) {
      __ addl(edx, numerator);
    } else if (imm < 0 && magic > 0) {
      __ subl(edx, numerator);
    }

    if (shift != 0) {
      __ sarl(edx, Immediate(shift));
    }

    // EDX += 1 if EDX < 0 (add back the sign bit to round toward zero).
    __ movl(eax, edx);
    __ shrl(edx, Immediate(31));
    __ addl(edx, eax);

    if (instruction->IsRem()) {
      // remainder = numerator - quotient * imm.
      __ movl(eax, numerator);
      __ imull(edx, Immediate(imm));
      __ subl(eax, edx);
      __ movl(edx, eax);
    } else {
      __ movl(eax, edx);
    }
  } else {
    int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();

    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);

    CpuRegister rax = eax;
    CpuRegister rdx = edx;

    CalculateMagicAndShiftForDivRem(imm, true /* is_long */, &magic, &shift);

    // Save the numerator.
    __ movq(numerator, rax);

    // RAX = magic
    codegen_->Load64BitValue(rax, magic);

    // RDX:RAX = magic * numerator
    __ imulq(numerator);

    if (imm > 0 && magic < 0) {
      // RDX += numerator
      __ addq(rdx, numerator);
    } else if (imm < 0 && magic > 0) {
      // RDX -= numerator
      __ subq(rdx, numerator);
    }

    // Shift if needed.
    if (shift != 0) {
      __ sarq(rdx, Immediate(shift));
    }

    // RDX += 1 if RDX < 0
    __ movq(rax, rdx);
    __ shrq(rdx, Immediate(63));
    __ addq(rdx, rax);

    if (instruction->IsRem()) {
      __ movq(rax, numerator);

      // 64-bit imul only takes a 32-bit immediate; use a literal pool
      // operand for larger constants.
      if (IsInt<32>(imm)) {
        __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
      } else {
        __ imulq(rdx, codegen_->LiteralInt64Address(imm));
      }

      __ subq(rax, rdx);
      __ movq(rdx, rax);
    } else {
      __ movq(rax, rdx);
    }
  }
}
3659
Calin Juravlebacfec32014-11-14 15:54:36 +00003660void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3661 DCHECK(instruction->IsDiv() || instruction->IsRem());
3662 Primitive::Type type = instruction->GetResultType();
3663 DCHECK(type == Primitive::kPrimInt || Primitive::kPrimLong);
3664
3665 bool is_div = instruction->IsDiv();
3666 LocationSummary* locations = instruction->GetLocations();
3667
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003668 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3669 Location second = locations->InAt(1);
Calin Juravlebacfec32014-11-14 15:54:36 +00003670
Roland Levillain271ab9c2014-11-27 15:23:57 +00003671 DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003672 DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());
Calin Juravlebacfec32014-11-14 15:54:36 +00003673
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003674 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003675 int64_t imm = Int64FromConstant(second.GetConstant());
Calin Juravlebacfec32014-11-14 15:54:36 +00003676
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003677 if (imm == 0) {
3678 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
3679 } else if (imm == 1 || imm == -1) {
3680 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003681 } else if (instruction->IsDiv() && IsPowerOfTwo(AbsOrMin(imm))) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003682 DivByPowerOfTwo(instruction->AsDiv());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003683 } else {
3684 DCHECK(imm <= -2 || imm >= 2);
3685 GenerateDivRemWithAnyConstant(instruction);
3686 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003687 } else {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003688 SlowPathCode* slow_path =
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003689 new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86_64(
David Srbecky9cd6d372016-02-09 15:24:47 +00003690 instruction, out.AsRegister(), type, is_div);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003691 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003692
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003693 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3694 // 0x80000000(00000000)/-1 triggers an arithmetic exception!
3695 // Dividing by -1 is actually negation and -0x800000000(00000000) = 0x80000000(00000000)
3696 // so it's safe to just use negl instead of more complex comparisons.
3697 if (type == Primitive::kPrimInt) {
3698 __ cmpl(second_reg, Immediate(-1));
3699 __ j(kEqual, slow_path->GetEntryLabel());
3700 // edx:eax <- sign-extended of eax
3701 __ cdq();
3702 // eax = quotient, edx = remainder
3703 __ idivl(second_reg);
3704 } else {
3705 __ cmpq(second_reg, Immediate(-1));
3706 __ j(kEqual, slow_path->GetEntryLabel());
3707 // rdx:rax <- sign-extended of rax
3708 __ cqo();
3709 // rax = quotient, rdx = remainder
3710 __ idivq(second_reg);
3711 }
3712 __ Bind(slow_path->GetExitLabel());
3713 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003714}
3715
Calin Juravle7c4954d2014-10-28 16:57:40 +00003716void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
3717 LocationSummary* locations =
3718 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
3719 switch (div->GetResultType()) {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003720 case Primitive::kPrimInt:
3721 case Primitive::kPrimLong: {
Calin Juravled0d48522014-11-04 16:40:20 +00003722 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003723 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003724 locations->SetOut(Location::SameAsFirstInput());
3725 // Intel uses edx:eax as the dividend.
3726 locations->AddTemp(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003727 // We need to save the numerator while we tweak rax and rdx. As we are using imul in a way
3728 // which enforces results to be in RAX and RDX, things are simpler if we use RDX also as
3729 // output and request another temp.
3730 if (div->InputAt(1)->IsConstant()) {
3731 locations->AddTemp(Location::RequiresRegister());
3732 }
Calin Juravled0d48522014-11-04 16:40:20 +00003733 break;
3734 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003735
Calin Juravle7c4954d2014-10-28 16:57:40 +00003736 case Primitive::kPrimFloat:
3737 case Primitive::kPrimDouble: {
3738 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003739 locations->SetInAt(1, Location::Any());
Calin Juravle7c4954d2014-10-28 16:57:40 +00003740 locations->SetOut(Location::SameAsFirstInput());
3741 break;
3742 }
3743
3744 default:
3745 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3746 }
3747}
3748
// Emits code for HDiv. Integral types defer to GenerateDivRemIntegral;
// floating-point types emit divss/divsd with the divisor taken from a
// register, a literal-pool constant, or a stack slot.
void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
  LocationSummary* locations = div->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  // The result is produced in place in the first input's register.
  DCHECK(first.Equals(locations->Out()));

  Primitive::Type type = div->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GenerateDivRemIntegral(div);
      break;
    }

    case Primitive::kPrimFloat: {
      if (second.IsFpuRegister()) {
        __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant divisor: reference it via the literal pool.
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (second.IsFpuRegister()) {
        __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
3797
Calin Juravlebacfec32014-11-14 15:54:36 +00003798void LocationsBuilderX86_64::VisitRem(HRem* rem) {
Calin Juravled2ec87d2014-12-08 14:24:46 +00003799 Primitive::Type type = rem->GetResultType();
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003800 LocationSummary* locations =
3801 new (GetGraph()->GetArena()) LocationSummary(rem, LocationSummary::kNoCall);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003802
3803 switch (type) {
Calin Juravlebacfec32014-11-14 15:54:36 +00003804 case Primitive::kPrimInt:
3805 case Primitive::kPrimLong: {
3806 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003807 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003808 // Intel uses rdx:rax as the dividend and puts the remainder in rdx
3809 locations->SetOut(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003810 // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
3811 // which enforces results to be in RAX and RDX, things are simpler if we use EAX also as
3812 // output and request another temp.
3813 if (rem->InputAt(1)->IsConstant()) {
3814 locations->AddTemp(Location::RequiresRegister());
3815 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003816 break;
3817 }
3818
3819 case Primitive::kPrimFloat:
3820 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003821 locations->SetInAt(0, Location::Any());
3822 locations->SetInAt(1, Location::Any());
3823 locations->SetOut(Location::RequiresFpuRegister());
3824 locations->AddTemp(Location::RegisterLocation(RAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003825 break;
3826 }
3827
3828 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003829 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003830 }
3831}
3832
3833void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
3834 Primitive::Type type = rem->GetResultType();
3835 switch (type) {
3836 case Primitive::kPrimInt:
3837 case Primitive::kPrimLong: {
3838 GenerateDivRemIntegral(rem);
3839 break;
3840 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003841 case Primitive::kPrimFloat:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003842 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003843 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003844 break;
3845 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003846 default:
3847 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
3848 }
3849}
3850
Calin Juravled0d48522014-11-04 16:40:20 +00003851void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003852 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Calin Juravled0d48522014-11-04 16:40:20 +00003853 locations->SetInAt(0, Location::Any());
Calin Juravled0d48522014-11-04 16:40:20 +00003854}
3855
// Emits the divide-by-zero check: jumps to a throwing slow path when the
// divisor is zero. Sub-int types are handled with 32-bit compares. For a
// constant divisor the check folds away entirely (non-zero) or becomes an
// unconditional jump (zero).
void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCode* slow_path =
      new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86_64(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location value = locations->InAt(0);

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt: {
      if (value.IsRegister()) {
        __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsStackSlot()) {
        __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
          // Constant zero divisor: always throw.
          __ jmp(slow_path->GetEntryLabel());
        }
        // Non-zero constant: no check needed, emit nothing.
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (value.IsRegister()) {
        __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsDoubleStackSlot()) {
        __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
  }
}
3903
Calin Juravle9aec02f2014-11-18 23:06:35 +00003904void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
3905 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3906
3907 LocationSummary* locations =
3908 new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
3909
3910 switch (op->GetResultType()) {
3911 case Primitive::kPrimInt:
3912 case Primitive::kPrimLong: {
3913 locations->SetInAt(0, Location::RequiresRegister());
3914 // The shift count needs to be in CL.
3915 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
3916 locations->SetOut(Location::SameAsFirstInput());
3917 break;
3918 }
3919 default:
3920 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
3921 }
3922}
3923
// Emits code for Shl/Shr/UShr. The shift happens in place on the first
// input's register; the count is either CL or a constant masked to the
// valid distance (kMaxIntShiftDistance / kMaxLongShiftDistance).
void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  switch (op->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        // Variable count: shll/sarl/shrl take the count implicitly from CL.
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        if (op->IsShl()) {
          __ shll(first_reg, second_reg);
        } else if (op->IsShr()) {
          __ sarl(first_reg, second_reg);
        } else {
          __ shrl(first_reg, second_reg);
        }
      } else {
        // Constant count: mask to the maximum int shift distance.
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
        if (op->IsShl()) {
          __ shll(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarl(first_reg, imm);
        } else {
          __ shrl(first_reg, imm);
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        if (op->IsShl()) {
          __ shlq(first_reg, second_reg);
        } else if (op->IsShr()) {
          __ sarq(first_reg, second_reg);
        } else {
          __ shrq(first_reg, second_reg);
        }
      } else {
        // Constant count: mask to the maximum long shift distance.
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
        if (op->IsShl()) {
          __ shlq(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarq(first_reg, imm);
        } else {
          __ shrq(first_reg, imm);
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
      UNREACHABLE();
  }
}
3981
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003982void LocationsBuilderX86_64::VisitRor(HRor* ror) {
3983 LocationSummary* locations =
3984 new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);
3985
3986 switch (ror->GetResultType()) {
3987 case Primitive::kPrimInt:
3988 case Primitive::kPrimLong: {
3989 locations->SetInAt(0, Location::RequiresRegister());
3990 // The shift count needs to be in CL (unless it is a constant).
3991 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
3992 locations->SetOut(Location::SameAsFirstInput());
3993 break;
3994 }
3995 default:
3996 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3997 UNREACHABLE();
3998 }
3999}
4000
// Emits code for HRor: rorl/rorq in place on the first input's register,
// with the count in CL or as a masked constant immediate.
void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
  LocationSummary* locations = ror->GetLocations();
  CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  switch (ror->GetResultType()) {
    case Primitive::kPrimInt:
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        __ rorl(first_reg, second_reg);
      } else {
        // Constant count: mask to the maximum int shift distance.
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
        __ rorl(first_reg, imm);
      }
      break;
    case Primitive::kPrimLong:
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        __ rorq(first_reg, second_reg);
      } else {
        // Constant count: mask to the maximum long shift distance.
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
        __ rorq(first_reg, imm);
      }
      break;
    default:
      LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
      UNREACHABLE();
  }
}
4030
// Shl shares the common shift location scheme.
void LocationsBuilderX86_64::VisitShl(HShl* shl) {
  HandleShift(shl);
}
4034
// Shl shares the common shift code generation.
void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
  HandleShift(shl);
}
4038
// Shr shares the common shift location scheme.
void LocationsBuilderX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}
4042
// Shr shares the common shift code generation.
void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}
4046
// UShr shares the common shift location scheme.
void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
4050
// UShr shares the common shift code generation.
void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
4054
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004055void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004056 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004057 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004058 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00004059 if (instruction->IsStringAlloc()) {
4060 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
4061 } else {
4062 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
David Brazdil6de19382016-01-08 17:37:10 +00004063 }
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01004064 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004065}
4066
// Emits the allocation call for HNewInstance.
void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes cares
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    CpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<CpuRegister>();
    MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize);
    // Load the entrypoint ArtMethod* from the thread (GS-relative), then
    // call through its quick compiled code pointer.
    __ gs()->movq(temp, Address::Absolute(QUICK_ENTRY_POINT(pNewEmptyString), /* no_rip */ true));
    __ call(Address(temp, code_offset.SizeValue()));
    // Manual call, so record the PC for stack maps explicitly.
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
    DCHECK(!codegen_->IsLeafMethod());
  }
}
4083
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004084void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
4085 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004086 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004087 InvokeRuntimeCallingConvention calling_convention;
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004088 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00004089 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
4090 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004091}
4092
// Emits the allocation runtime call for HNewArray, choosing the entrypoint
// from the array's resolved class.
void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes cares
  // of poisoning the reference.
  QuickEntrypointEnum entrypoint =
      CodeGenerator::GetArrayAllocationEntrypoint(instruction->GetLoadClass()->GetClass());
  codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
  DCHECK(!codegen_->IsLeafMethod());
}
4102
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004103void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004104 LocationSummary* locations =
4105 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004106 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4107 if (location.IsStackSlot()) {
4108 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4109 } else if (location.IsDoubleStackSlot()) {
4110 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4111 }
4112 locations->SetOut(location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004113}
4114
// No code to emit: the builder already placed the parameter's location.
void InstructionCodeGeneratorX86_64::VisitParameterValue(
    HParameterValue* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}
4119
4120void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
4121 LocationSummary* locations =
4122 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4123 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
4124}
4125
// No code to emit: the method pointer already lives in its assigned register.
void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
    HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
4130
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004131void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
4132 LocationSummary* locations =
4133 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4134 locations->SetInAt(0, Location::RequiresRegister());
4135 locations->SetOut(Location::RequiresRegister());
4136}
4137
// Emits the load of a method pointer from a class's dispatch tables: a
// single load from the embedded vtable, or two loads (IMT pointer, then
// table entry) for the interface method table.
void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    // Vtable entries are embedded in the class object itself.
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kX86_64PointerSize).SizeValue();
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
  } else {
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kX86_64PointerSize));
    // First load the ImTable pointer from the class, ...
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(),
                    mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
    // ... then load the method entry from the table.
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->Out().AsRegister<CpuRegister>(), method_offset));
  }
}
4155
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004156void LocationsBuilderX86_64::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004157 LocationSummary* locations =
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004158 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00004159 locations->SetInAt(0, Location::RequiresRegister());
4160 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004161}
4162
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004163void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
4164 LocationSummary* locations = not_->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004165 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4166 locations->Out().AsRegister<CpuRegister>().AsRegister());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004167 Location out = locations->Out();
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004168 switch (not_->GetResultType()) {
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004169 case Primitive::kPrimInt:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004170 __ notl(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004171 break;
4172
4173 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004174 __ notq(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004175 break;
4176
4177 default:
4178 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
4179 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004180}
4181
David Brazdil66d126e2015-04-03 16:02:44 +01004182void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
4183 LocationSummary* locations =
4184 new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
4185 locations->SetInAt(0, Location::RequiresRegister());
4186 locations->SetOut(Location::SameAsFirstInput());
4187}
4188
4189void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004190 LocationSummary* locations = bool_not->GetLocations();
4191 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4192 locations->Out().AsRegister<CpuRegister>().AsRegister());
4193 Location out = locations->Out();
4194 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
4195}
4196
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004197void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004198 LocationSummary* locations =
4199 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004200 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004201 locations->SetInAt(i, Location::Any());
4202 }
4203 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004204}
4205
void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  // Phis emit no code: they are expected to be eliminated before code
  // generation (presumably resolved into moves by the register allocator —
  // confirm in the allocation pass). Reaching this visitor is a compiler bug.
  LOG(FATAL) << "Unimplemented";
}
4209
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004210void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004211 /*
Roland Levillain5e8d5f02016-10-18 18:03:43 +01004212 * According to the JSR-133 Cookbook, for x86-64 only StoreLoad/AnyAny barriers need memory fence.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004213 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
Calin Juravle52c48962014-12-16 17:02:57 +00004214 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4215 */
4216 switch (kind) {
4217 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00004218 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004219 break;
4220 }
4221 case MemBarrierKind::kAnyStore:
4222 case MemBarrierKind::kLoadAny:
4223 case MemBarrierKind::kStoreStore: {
4224 // nop
4225 break;
4226 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05004227 case MemBarrierKind::kNTStoreStore:
4228 // Non-Temporal Store/Store needs an explicit fence.
4229 MemoryFence(/* non-temporal */ true);
4230 break;
Calin Juravle52c48962014-12-16 17:02:57 +00004231 }
4232}
4233
4234void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
4235 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4236
Roland Levillain0d5a2812015-11-13 10:07:31 +00004237 bool object_field_get_with_read_barrier =
4238 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004239 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004240 new (GetGraph()->GetArena()) LocationSummary(instruction,
4241 object_field_get_with_read_barrier ?
4242 LocationSummary::kCallOnSlowPath :
4243 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01004244 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004245 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004246 }
Calin Juravle52c48962014-12-16 17:02:57 +00004247 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004248 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4249 locations->SetOut(Location::RequiresFpuRegister());
4250 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004251 // The output overlaps for an object field get when read barriers
4252 // are enabled: we do not want the move to overwrite the object's
4253 // location, as we need it to emit the read barrier.
4254 locations->SetOut(
4255 Location::RequiresRegister(),
4256 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004257 }
Calin Juravle52c48962014-12-16 17:02:57 +00004258}
4259
// Emits the load for an instance or static field get: a type-appropriate
// move from (base + offset), plus the read-barrier and volatile-ordering
// machinery required for reference and volatile fields. Implicit null-check
// recording is interleaved with the load so the faulting pc is the load
// itself.
void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Location base_loc = locations->InAt(0);
  CpuRegister base = base_loc.AsRegister<CpuRegister>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (field_type) {
    case Primitive::kPrimBoolean: {
      // Zero-extending byte load.
      __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimByte: {
      // Sign-extending byte load.
      __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimShort: {
      // Sign-extending 16-bit load.
      __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimChar: {
      // Zero-extending 16-bit load (Java char is unsigned).
      __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimInt: {
      __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimNot: {
      // /* HeapReference<Object> */ out = *(base + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            instruction, out, base, offset, /* needs_null_check */ true);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimFloat: {
      __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimDouble: {
      __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  if (field_type == Primitive::kPrimNot) {
    // Potential implicit null checks, in the case of reference
    // fields, are handled in the previous switch statement.
  } else {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (is_volatile) {
    if (field_type == Primitive::kPrimNot) {
      // Memory barriers, in the case of references, are also handled
      // in the previous switch statement.
    } else {
      // Volatile loads need a LoadAny barrier after the load.
      codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
    }
  }
}
4358
4359void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
4360 const FieldInfo& field_info) {
4361 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4362
4363 LocationSummary* locations =
4364 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Roland Levillain4d027112015-07-01 15:41:14 +01004365 Primitive::Type field_type = field_info.GetFieldType();
Mark Mendellea5af682015-10-22 17:35:49 -04004366 bool is_volatile = field_info.IsVolatile();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004367 bool needs_write_barrier =
Roland Levillain4d027112015-07-01 15:41:14 +01004368 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004369
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004370 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004371 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Mark Mendellea5af682015-10-22 17:35:49 -04004372 if (is_volatile) {
4373 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4374 locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
4375 } else {
4376 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4377 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004378 } else {
Mark Mendellea5af682015-10-22 17:35:49 -04004379 if (is_volatile) {
4380 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4381 locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
4382 } else {
4383 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4384 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004385 }
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004386 if (needs_write_barrier) {
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004387 // Temporary registers for the write barrier.
Roland Levillain4d027112015-07-01 15:41:14 +01004388 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004389 locations->AddTemp(Location::RequiresRegister());
Roland Levillain4d027112015-07-01 15:41:14 +01004390 } else if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4391 // Temporary register for the reference poisoning.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004392 locations->AddTemp(Location::RequiresRegister());
4393 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004394}
4395
// Emits the store for an instance or static field set: barriers for volatile
// semantics around the store, a type-appropriate move (with constants folded
// into immediates where possible), reference poisoning when enabled, and the
// GC card mark for reference stores. Statement order matters: barrier /
// store / null-check recording / write barrier / barrier.
void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
                                                    const FieldInfo& field_info,
                                                    bool value_can_be_null) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
  Location value = locations->InAt(1);
  bool is_volatile = field_info.IsVolatile();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  if (is_volatile) {
    // Volatile stores require an AnyStore barrier before the store.
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  // Set when a helper (MoveInt64ToAddress) already recorded the implicit
  // null check, so it must not be recorded a second time below.
  bool maybe_record_implicit_null_check_done = false;

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      if (value.IsConstant()) {
        int8_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movb(Address(base, offset), Immediate(v));
      } else {
        __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      if (value.IsConstant()) {
        int16_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movw(Address(base, offset), Immediate(v));
      } else {
        __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      if (value.IsConstant()) {
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        // `field_type == Primitive::kPrimNot` implies `v == 0`.
        DCHECK((field_type != Primitive::kPrimNot) || (v == 0));
        // Note: if heap poisoning is enabled, no need to poison
        // (negate) `v` if it is a reference, as it would be null.
        __ movl(Address(base, offset), Immediate(v));
      } else {
        if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
          // Poison the reference in a temp so the original value register
          // stays intact for the write barrier below.
          CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
          __ movl(temp, value.AsRegister<CpuRegister>());
          __ PoisonHeapReference(temp);
          __ movl(Address(base, offset), temp);
        } else {
          __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      if (value.IsConstant()) {
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        // A 64-bit immediate may need two 32-bit moves; the helper also
        // records the implicit null check at the right instruction.
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimFloat: {
      if (value.IsConstant()) {
        // Store the raw bit pattern of the float constant as an immediate.
        int32_t v =
            bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(Address(base, offset), Immediate(v));
      } else {
        __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (value.IsConstant()) {
        // Store the raw bit pattern of the double constant (possibly split
        // into two 32-bit moves by the helper).
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  if (!maybe_record_implicit_null_check_done) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    // Mark the GC card for the object holding the updated reference.
    CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
    CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
    codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
  }

  if (is_volatile) {
    // Volatile stores require an AnyAny barrier after the store.
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
4518
4519void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
4520 HandleFieldSet(instruction, instruction->GetFieldInfo());
4521}
4522
4523void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004524 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004525}
4526
void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Instance and static field loads use identical locations; see HandleFieldGet.
  HandleFieldGet(instruction);
}
4530
4531void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004532 HandleFieldGet(instruction, instruction->GetFieldInfo());
4533}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004534
void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Static field loads use the same locations as instance loads; see HandleFieldGet.
  HandleFieldGet(instruction);
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004538
Calin Juravle52c48962014-12-16 17:02:57 +00004539void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4540 HandleFieldGet(instruction, instruction->GetFieldInfo());
4541}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004542
Calin Juravle52c48962014-12-16 17:02:57 +00004543void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
4544 HandleFieldSet(instruction, instruction->GetFieldInfo());
4545}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004546
Calin Juravle52c48962014-12-16 17:02:57 +00004547void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004548 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004549}
4550
Calin Juravlee460d1d2015-09-29 04:52:17 +01004551void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
4552 HUnresolvedInstanceFieldGet* instruction) {
4553 FieldAccessCallingConventionX86_64 calling_convention;
4554 codegen_->CreateUnresolvedFieldLocationSummary(
4555 instruction, instruction->GetFieldType(), calling_convention);
4556}
4557
4558void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
4559 HUnresolvedInstanceFieldGet* instruction) {
4560 FieldAccessCallingConventionX86_64 calling_convention;
4561 codegen_->GenerateUnresolvedFieldAccess(instruction,
4562 instruction->GetFieldType(),
4563 instruction->GetFieldIndex(),
4564 instruction->GetDexPc(),
4565 calling_convention);
4566}
4567
4568void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
4569 HUnresolvedInstanceFieldSet* instruction) {
4570 FieldAccessCallingConventionX86_64 calling_convention;
4571 codegen_->CreateUnresolvedFieldLocationSummary(
4572 instruction, instruction->GetFieldType(), calling_convention);
4573}
4574
4575void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
4576 HUnresolvedInstanceFieldSet* instruction) {
4577 FieldAccessCallingConventionX86_64 calling_convention;
4578 codegen_->GenerateUnresolvedFieldAccess(instruction,
4579 instruction->GetFieldType(),
4580 instruction->GetFieldIndex(),
4581 instruction->GetDexPc(),
4582 calling_convention);
4583}
4584
4585void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
4586 HUnresolvedStaticFieldGet* instruction) {
4587 FieldAccessCallingConventionX86_64 calling_convention;
4588 codegen_->CreateUnresolvedFieldLocationSummary(
4589 instruction, instruction->GetFieldType(), calling_convention);
4590}
4591
4592void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
4593 HUnresolvedStaticFieldGet* instruction) {
4594 FieldAccessCallingConventionX86_64 calling_convention;
4595 codegen_->GenerateUnresolvedFieldAccess(instruction,
4596 instruction->GetFieldType(),
4597 instruction->GetFieldIndex(),
4598 instruction->GetDexPc(),
4599 calling_convention);
4600}
4601
4602void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
4603 HUnresolvedStaticFieldSet* instruction) {
4604 FieldAccessCallingConventionX86_64 calling_convention;
4605 codegen_->CreateUnresolvedFieldLocationSummary(
4606 instruction, instruction->GetFieldType(), calling_convention);
4607}
4608
4609void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
4610 HUnresolvedStaticFieldSet* instruction) {
4611 FieldAccessCallingConventionX86_64 calling_convention;
4612 codegen_->GenerateUnresolvedFieldAccess(instruction,
4613 instruction->GetFieldType(),
4614 instruction->GetFieldIndex(),
4615 instruction->GetDexPc(),
4616 calling_convention);
4617}
4618
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004619void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004620 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
4621 Location loc = codegen_->GetCompilerOptions().GetImplicitNullChecks()
4622 ? Location::RequiresRegister()
4623 : Location::Any();
4624 locations->SetInAt(0, loc);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004625}
4626
Calin Juravle2ae48182016-03-16 14:05:09 +00004627void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4628 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004629 return;
4630 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004631 LocationSummary* locations = instruction->GetLocations();
4632 Location obj = locations->InAt(0);
4633
4634 __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
Calin Juravle2ae48182016-03-16 14:05:09 +00004635 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004636}
4637
// Emits an explicit null test on the object and a branch to the
// NullPointerException slow path when it is null.
void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction);
  AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  if (obj.IsRegister()) {
    // test reg, reg sets ZF iff the reference is null.
    __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
  } else if (obj.IsStackSlot()) {
    // Compare the spilled reference against null directly in memory.
    __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
  } else {
    // A constant object here can only be the null constant: branch to the
    // slow path unconditionally, no comparison needed.
    DCHECK(obj.IsConstant()) << obj;
    DCHECK(obj.GetConstant()->IsNullConstant());
    __ jmp(slow_path->GetEntryLabel());
    return;
  }
  __ j(kEqual, slow_path->GetEntryLabel());
}
4657
void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
  // Common CodeGenerator entry point; presumably dispatches to
  // GenerateImplicitNullCheck or GenerateExplicitNullCheck above depending on
  // compiler options — confirm in the shared code_generator implementation.
  codegen_->GenerateNullCheck(instruction);
}
4661
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004662void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004663 bool object_array_get_with_read_barrier =
4664 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004665 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004666 new (GetGraph()->GetArena()) LocationSummary(instruction,
4667 object_array_get_with_read_barrier ?
4668 LocationSummary::kCallOnSlowPath :
4669 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01004670 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004671 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004672 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004673 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04004674 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004675 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4676 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4677 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004678 // The output overlaps for an object array get when read barriers
4679 // are enabled: we do not want the move to overwrite the array's
4680 // location, as we need it to emit the read barrier.
4681 locations->SetOut(
4682 Location::RequiresRegister(),
4683 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004684 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004685}
4686
4687void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
4688 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004689 Location obj_loc = locations->InAt(0);
4690 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004691 Location index = locations->InAt(1);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004692 Location out_loc = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01004693 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004694
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004695 Primitive::Type type = instruction->GetType();
Roland Levillain4d027112015-07-01 15:41:14 +01004696 switch (type) {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004697 case Primitive::kPrimBoolean: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004698 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004699 __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004700 break;
4701 }
4702
4703 case Primitive::kPrimByte: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004704 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004705 __ movsxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004706 break;
4707 }
4708
4709 case Primitive::kPrimShort: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004710 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004711 __ movsxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004712 break;
4713 }
4714
4715 case Primitive::kPrimChar: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004716 CpuRegister out = out_loc.AsRegister<CpuRegister>();
jessicahandojo4877b792016-09-08 19:49:13 -07004717 if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
4718 // Branch cases into compressed and uncompressed for each index's type.
4719 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
4720 NearLabel done, not_compressed;
Vladimir Marko3c89d422017-02-17 11:30:23 +00004721 __ testb(Address(obj, count_offset), Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07004722 codegen_->MaybeRecordImplicitNullCheck(instruction);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01004723 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
4724 "Expecting 0=compressed, 1=uncompressed");
4725 __ j(kNotZero, &not_compressed);
jessicahandojo4877b792016-09-08 19:49:13 -07004726 __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
4727 __ jmp(&done);
4728 __ Bind(&not_compressed);
4729 __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
4730 __ Bind(&done);
4731 } else {
4732 __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
4733 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004734 break;
4735 }
4736
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004737 case Primitive::kPrimInt: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004738 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004739 __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004740 break;
4741 }
4742
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004743 case Primitive::kPrimNot: {
4744 static_assert(
4745 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
4746 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004747 // /* HeapReference<Object> */ out =
4748 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
4749 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004750 // Note that a potential implicit null check is handled in this
Roland Levillaina1aa3b12016-10-26 13:03:38 +01004751 // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004752 codegen_->GenerateArrayLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00004753 instruction, out_loc, obj, data_offset, index, /* needs_null_check */ true);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004754 } else {
4755 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004756 __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
4757 codegen_->MaybeRecordImplicitNullCheck(instruction);
4758 // If read barriers are enabled, emit read barriers other than
4759 // Baker's using a slow path (and also unpoison the loaded
4760 // reference, if heap poisoning is enabled).
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004761 if (index.IsConstant()) {
4762 uint32_t offset =
4763 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004764 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
4765 } else {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004766 codegen_->MaybeGenerateReadBarrierSlow(
4767 instruction, out_loc, out_loc, obj_loc, data_offset, index);
4768 }
4769 }
4770 break;
4771 }
4772
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004773 case Primitive::kPrimLong: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004774 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004775 __ movq(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004776 break;
4777 }
4778
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004779 case Primitive::kPrimFloat: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004780 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004781 __ movss(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004782 break;
4783 }
4784
4785 case Primitive::kPrimDouble: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004786 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004787 __ movsd(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004788 break;
4789 }
4790
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004791 case Primitive::kPrimVoid:
Roland Levillain4d027112015-07-01 15:41:14 +01004792 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004793 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004794 }
Roland Levillain4d027112015-07-01 15:41:14 +01004795
4796 if (type == Primitive::kPrimNot) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004797 // Potential implicit null checks, in the case of reference
4798 // arrays, are handled in the previous switch statement.
4799 } else {
4800 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01004801 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004802}
4803
4804void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004805 Primitive::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004806
4807 bool needs_write_barrier =
4808 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004809 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004810
Nicolas Geoffray39468442014-09-02 15:17:15 +01004811 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004812 instruction,
Vladimir Marko8d49fd72016-08-25 15:20:47 +01004813 may_need_runtime_call_for_type_check ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00004814 LocationSummary::kCallOnSlowPath :
4815 LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004816
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004817 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04004818 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4819 if (Primitive::IsFloatingPointType(value_type)) {
4820 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004821 } else {
4822 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
4823 }
4824
4825 if (needs_write_barrier) {
4826 // Temporary registers for the write barrier.
Roland Levillain16d9f942016-08-25 17:27:56 +01004827 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004828 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004829 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004830}
4831
// Emits the code for an array store.  Reference stores additionally get an
// optional dynamic type check (falling back to an ArraySet slow path),
// heap-reference poisoning when enabled, and GC card marking.  For every
// element type, the first memory access to the array object is recorded as
// the faulting instruction for a possible implicit null check.
void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location array_loc = locations->InAt(0);
  CpuRegister array = array_loc.AsRegister<CpuRegister>();
  Location index = locations->InAt(1);
  Location value = locations->InAt(2);
  Primitive::Type value_type = instruction->GetComponentType();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  // Field offsets used by the reference type check below.
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_1, offset);
      if (value.IsRegister()) {
        __ movb(address, value.AsRegister<CpuRegister>());
      } else {
        __ movb(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
      }
      // The store above is the first access to `array`; record it for the
      // implicit null check.
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_2, offset);
      if (value.IsRegister()) {
        __ movw(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        __ movw(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case Primitive::kPrimNot: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);

      if (!value.IsRegister()) {
        // Just setting null: no type check, no write barrier, plain store.
        DCHECK(instruction->InputAt(2)->IsNullConstant());
        DCHECK(value.IsConstant()) << value;
        __ movl(address, Immediate(0));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        DCHECK(!needs_write_barrier);
        DCHECK(!may_need_runtime_call_for_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      CpuRegister register_value = value.AsRegister<CpuRegister>();
      // We cannot use a NearLabel for `done`, as its range may be too
      // short when Baker read barriers are enabled.
      Label done;
      NearLabel not_null, do_put;
      SlowPathCode* slow_path = nullptr;
      Location temp_loc = locations->GetTemp(0);
      CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
      if (may_need_runtime_call_for_type_check) {
        slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathX86_64(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          // A null value never fails the type check; store it directly and
          // skip over the check and the card marking.
          __ testl(register_value, register_value);
          __ j(kNotEqual, &not_null);
          __ movl(address, Immediate(0));
          codegen_->MaybeRecordImplicitNullCheck(instruction);
          __ jmp(&done);
          __ Bind(&not_null);
        }

        // Note that when Baker read barriers are enabled, the type
        // checks are performed without read barriers.  This is fine,
        // even in the case where a class object is in the from-space
        // after the flip, as a comparison involving such a type would
        // not produce a false positive; it may of course produce a
        // false negative, in which case we would take the ArraySet
        // slow path.

        // /* HeapReference<Class> */ temp = array->klass_
        __ movl(temp, Address(array, class_offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ MaybeUnpoisonHeapReference(temp);

        // /* HeapReference<Class> */ temp = temp->component_type_
        __ movl(temp, Address(temp, component_offset));
        // If heap poisoning is enabled, no need to unpoison `temp`
        // nor the object reference in `register_value->klass`, as
        // we are comparing two poisoned references.
        __ cmpl(temp, Address(register_value, class_offset));

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          // Exact-class match stores directly; otherwise, accept only when the
          // component type is Object (super class is null), else take the slow
          // path for the full check.
          __ j(kEqual, &do_put);
          // If heap poisoning is enabled, the `temp` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp);

          // If heap poisoning is enabled, no need to unpoison the
          // heap reference loaded below, as it is only used for a
          // comparison with null.
          __ cmpl(Address(temp, super_offset), Immediate(0));
          __ j(kNotEqual, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ j(kNotEqual, slow_path->GetEntryLabel());
        }
      }

      // Do the actual store, poisoning the reference via `temp` first when
      // heap poisoning is enabled.
      if (kPoisonHeapReferences) {
        __ movl(temp, register_value);
        __ PoisonHeapReference(temp);
        __ movl(address, temp);
      } else {
        __ movl(address, register_value);
      }
      // When the type check ran, the class load above already recorded the
      // implicit null check; only record it here otherwise.
      if (!may_need_runtime_call_for_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      // Dirty the GC card for the stored reference.
      CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
      codegen_->MarkGCCard(
          temp, card, array, value.AsRegister<CpuRegister>(), instruction->GetValueCanBeNull());
      __ Bind(&done);

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }

      break;
    }

    case Primitive::kPrimInt: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsRegister()) {
        __ movl(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsRegister()) {
        __ movq(address, value.AsRegister<CpuRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        // A 64-bit immediate may not fit a single store; MoveInt64ToAddress
        // can split it across the low/high 32-bit halves.
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsFpuRegister()) {
        __ movss(address, value.AsFpuRegister<XmmRegister>());
      } else {
        DCHECK(value.IsConstant());
        // Store the raw bit pattern of the float constant.
        int32_t v = bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsFpuRegister()) {
        __ movsd(address, value.AsFpuRegister<XmmRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        // Store the raw bit pattern of the double constant, possibly split
        // into two 32-bit halves by MoveInt64ToAddress.
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
5034
5035void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005036 LocationSummary* locations =
5037 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005038 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04005039 if (!instruction->IsEmittedAtUseSite()) {
5040 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5041 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005042}
5043
// Loads the array length (or String count) into the output register.  Does
// nothing when the length is emitted directly at its use site (e.g. folded
// into a BoundsCheck).
void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
  if (instruction->IsEmittedAtUseSite()) {
    return;
  }

  LocationSummary* locations = instruction->GetLocations();
  uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
  CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  __ movl(out, Address(obj, offset));
  // The load above is the first access to `obj`: record it for the implicit
  // null check.
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  // For a compressed String, the count field holds (length << 1) | flag with
  // the compression flag in the least significant bit (see the
  // StringCompressionFlag static_assert in VisitArrayGet); shift the flag bit
  // out to recover the character count.
  if (mirror::kUseStringCompression && instruction->IsStringLength()) {
    __ shrl(out, Immediate(1));
  }
}
5060
5061void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005062 RegisterSet caller_saves = RegisterSet::Empty();
5063 InvokeRuntimeCallingConvention calling_convention;
5064 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5065 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
5066 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Mark Mendellf60c90b2015-03-04 15:12:59 -05005067 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendellee8d9712016-07-12 11:13:15 -04005068 HInstruction* length = instruction->InputAt(1);
5069 if (!length->IsEmittedAtUseSite()) {
5070 locations->SetInAt(1, Location::RegisterOrConstant(length));
5071 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005072}
5073
// Emits an array bounds check: jumps to a throwing slow path when
// index >= length (unsigned compare, which also catches negative indices).
// Handles constant index/length folding, lengths emitted at the use site
// (read straight from the array object), and compressed-String lengths.
void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location index_loc = locations->InAt(0);
  Location length_loc = locations->InAt(1);
  SlowPathCode* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathX86_64(instruction);

  if (length_loc.IsConstant()) {
    int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
    if (index_loc.IsConstant()) {
      // Both operands constant: the check is decided at compile time.
      // BCE will remove the bounds check if we are guaranteed to pass.
      int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
      if (index < 0 || index >= length) {
        // Statically out of bounds: always throw.
        codegen_->AddSlowPath(slow_path);
        __ jmp(slow_path->GetEntryLabel());
      } else {
        // Some optimization after BCE may have generated this, and we should not
        // generate a bounds check if it is a valid range.
      }
      return;
    }

    // We have to reverse the jump condition because the length is the constant.
    // Unsigned kAboveEqual makes a negative index compare as a huge value.
    CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
    __ cmpl(index_reg, Immediate(length));
    codegen_->AddSlowPath(slow_path);
    __ j(kAboveEqual, slow_path->GetEntryLabel());
  } else {
    HInstruction* array_length = instruction->InputAt(1);
    if (array_length->IsEmittedAtUseSite()) {
      // Address the length field in the array.
      DCHECK(array_length->IsArrayLength());
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
        // The String count field carries the compression flag in its least
        // significant bit; load it into TMP and shift the flag out before
        // comparing against the index.
        // TODO: if index_loc.IsConstant(), compare twice the index (to compensate for
        // the string compression flag) with the in-memory length and avoid the temporary.
        CpuRegister length_reg = CpuRegister(TMP);
        __ movl(length_reg, array_len);
        codegen_->MaybeRecordImplicitNullCheck(array_length);
        __ shrl(length_reg, Immediate(1));
        codegen_->GenerateIntCompare(length_reg, index_loc);
      } else {
        // Checking the bound for general case:
        // Array of char or String's array when the compression feature off.
        if (index_loc.IsConstant()) {
          int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
          __ cmpl(array_len, Immediate(value));
        } else {
          __ cmpl(array_len, index_loc.AsRegister<CpuRegister>());
        }
        // The in-memory compare above is the first access to the array.
        codegen_->MaybeRecordImplicitNullCheck(array_length);
      }
    } else {
      codegen_->GenerateIntCompare(length_loc, index_loc);
    }
    codegen_->AddSlowPath(slow_path);
    // Here the comparison is length vs. index, so throw when length <= index
    // (unsigned).
    __ j(kBelowEqual, slow_path->GetEntryLabel());
  }
}
5134
// Emits the GC card-marking sequence for a reference store into `object`.
// `temp` and `card` are scratch registers and are clobbered.  When
// `value_can_be_null` is true, the marking is skipped entirely for a null
// `value` (storing null does not create a reference to remember).
void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
                                     CpuRegister card,
                                     CpuRegister object,
                                     CpuRegister value,
                                     bool value_can_be_null) {
  NearLabel is_null;
  if (value_can_be_null) {
    __ testl(value, value);
    __ j(kEqual, &is_null);
  }
  // Load the card-table base from the current Thread (GS-relative TLS access).
  __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64PointerSize>().Int32Value(),
                                        /* no_rip */ true));
  // Compute the card index for `object`.
  __ movq(temp, object);
  __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
  // Dirty the card by storing the low byte of `card`; the card-table base is
  // presumably biased so that its low byte equals the dirty-card value —
  // confirm against gc/accounting/card_table.h.
  __ movb(Address(temp, card, TIMES_1, 0), card);
  if (value_can_be_null) {
    __ Bind(&is_null);
  }
}
5154
// Parallel moves carry their locations within the move operands themselves
// and are emitted via the parallel-move resolver (see
// InstructionCodeGeneratorX86_64::VisitParallelMove), so this visitor is
// never expected to be reached.
void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented";
}
5158
// Delegates emission of the parallel move to the code generator's
// parallel-move resolver.
void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
5162
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005163void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Marko70e97462016-08-09 11:04:26 +01005164 LocationSummary* locations =
5165 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
Aart Bikb13c65b2017-03-21 20:14:07 -07005166 // In suspend check slow path, usually there are no caller-save registers at all.
5167 // If SIMD instructions are present, however, we force spilling all live SIMD
5168 // registers in full width (since the runtime only saves/restores lower part).
Aart Bik5576f372017-03-23 16:17:37 -07005169 locations->SetCustomSlowPathCallerSaves(
5170 GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005171}
5172
5173void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005174 HBasicBlock* block = instruction->GetBlock();
5175 if (block->GetLoopInformation() != nullptr) {
5176 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5177 // The back edge will generate the suspend check.
5178 return;
5179 }
5180 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5181 // The goto will generate the suspend check.
5182 return;
5183 }
5184 GenerateSuspendCheck(instruction, nullptr);
5185}
5186
// Emits a test of the current thread's flags followed by a conditional
// branch to the suspend-check slow path.  With a null `successor`, execution
// falls through after the check; with a non-null `successor` (loop back-edge
// case), the fast path jumps to the successor and the slow path is entered
// otherwise.  The slow path object is cached on the instruction so repeated
// emission for the same check reuses it.
void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  SuspendCheckSlowPathX86_64* slow_path =
      down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathX86_64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
      codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
    }
  } else {
    // A cached slow path must have been created for the same successor.
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  // Test the thread flags (GS-relative TLS access); non-zero means a suspend
  // (or similar) request is pending.
  __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64PointerSize>().Int32Value(),
                                  /* no_rip */ true),
                Immediate(0));
  if (successor == nullptr) {
    __ j(kNotEqual, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ j(kEqual, codegen_->GetLabelOf(successor));
    __ jmp(slow_path->GetEntryLabel());
  }
}
5214
// The move resolver emits code through the code generator's assembler.
X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
  return codegen_->GetAssembler();
}
5218
5219void ParallelMoveResolverX86_64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005220 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005221 Location source = move->GetSource();
5222 Location destination = move->GetDestination();
5223
5224 if (source.IsRegister()) {
5225 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005226 __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005227 } else if (destination.IsStackSlot()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005228 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005229 source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005230 } else {
5231 DCHECK(destination.IsDoubleStackSlot());
5232 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005233 source.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005234 }
5235 } else if (source.IsStackSlot()) {
5236 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005237 __ movl(destination.AsRegister<CpuRegister>(),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005238 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005239 } else if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005240 __ movss(destination.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005241 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005242 } else {
5243 DCHECK(destination.IsStackSlot());
5244 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5245 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5246 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005247 } else if (source.IsDoubleStackSlot()) {
5248 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005249 __ movq(destination.AsRegister<CpuRegister>(),
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005250 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005251 } else if (destination.IsFpuRegister()) {
Roland Levillain199f3362014-11-27 17:15:16 +00005252 __ movsd(destination.AsFpuRegister<XmmRegister>(),
5253 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005254 } else {
Nicolas Geoffrayc8147a72014-10-21 16:06:20 +01005255 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005256 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5257 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5258 }
Aart Bik5576f372017-03-23 16:17:37 -07005259 } else if (source.IsSIMDStackSlot()) {
5260 DCHECK(destination.IsFpuRegister());
5261 __ movups(destination.AsFpuRegister<XmmRegister>(),
5262 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005263 } else if (source.IsConstant()) {
5264 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00005265 if (constant->IsIntConstant() || constant->IsNullConstant()) {
5266 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005267 if (destination.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005268 if (value == 0) {
5269 __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
5270 } else {
5271 __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
5272 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005273 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005274 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005275 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005276 }
5277 } else if (constant->IsLongConstant()) {
5278 int64_t value = constant->AsLongConstant()->GetValue();
5279 if (destination.IsRegister()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04005280 codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005281 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005282 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005283 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005284 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005285 } else if (constant->IsFloatConstant()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005286 float fp_value = constant->AsFloatConstant()->GetValue();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005287 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005288 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005289 codegen_->Load32BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005290 } else {
5291 DCHECK(destination.IsStackSlot()) << destination;
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005292 Immediate imm(bit_cast<int32_t, float>(fp_value));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005293 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
5294 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005295 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005296 DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005297 double fp_value = constant->AsDoubleConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005298 int64_t value = bit_cast<int64_t, double>(fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005299 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005300 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005301 codegen_->Load64BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005302 } else {
5303 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005304 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005305 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005306 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005307 } else if (source.IsFpuRegister()) {
5308 if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005309 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005310 } else if (destination.IsStackSlot()) {
5311 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005312 source.AsFpuRegister<XmmRegister>());
Aart Bik5576f372017-03-23 16:17:37 -07005313 } else if (destination.IsDoubleStackSlot()) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005314 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005315 source.AsFpuRegister<XmmRegister>());
Aart Bik5576f372017-03-23 16:17:37 -07005316 } else {
5317 DCHECK(destination.IsSIMDStackSlot());
5318 __ movups(Address(CpuRegister(RSP), destination.GetStackIndex()),
5319 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005320 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005321 }
5322}
5323
// Swaps the 32-bit value in `reg` with the 32-bit stack slot at RSP+`mem`,
// using the reserved TMP register as the intermediary.
void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movl(Address(CpuRegister(RSP), mem), reg);
  __ movl(reg, CpuRegister(TMP));
}
5329
// Swaps two 32-bit stack slots. Besides TMP this needs a second scratch core
// register; if none is free, the scope spills one onto the stack.
void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  // If the scratch register was spilled, RSP moved down by one word, so the
  // original slot offsets must be adjusted accordingly.
  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
  __ movl(CpuRegister(ensure_scratch.GetRegister()),
          Address(CpuRegister(RSP), mem2 + stack_offset));
  __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
  __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
          CpuRegister(ensure_scratch.GetRegister()));
}
5342
// Swaps two 64-bit core registers through TMP.
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
  __ movq(CpuRegister(TMP), reg1);
  __ movq(reg1, reg2);
  __ movq(reg2, CpuRegister(TMP));
}
5348
// Swaps the 64-bit value in `reg` with the 64-bit stack slot at RSP+`mem`,
// using TMP as the intermediary.
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movq(Address(CpuRegister(RSP), mem), reg);
  __ movq(reg, CpuRegister(TMP));
}
5354
// Swaps two 64-bit stack slots; mirrors the 32-bit variant above, including the
// stack-offset fixup when the second scratch register had to be spilled.
void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  // Spilling the scratch register pushes one word, shifting the slot offsets.
  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
  __ movq(CpuRegister(ensure_scratch.GetRegister()),
          Address(CpuRegister(RSP), mem2 + stack_offset));
  __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
  __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
          CpuRegister(ensure_scratch.GetRegister()));
}
5367
// Swaps the low 32 bits of FP register `reg` with the 32-bit stack slot at
// RSP+`mem`: the slot value is parked in TMP, the register is stored with
// movss, and TMP is moved back into the register with movd.
void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movss(Address(CpuRegister(RSP), mem), reg);
  __ movd(reg, CpuRegister(TMP));
}
5373
// Swaps the low 64 bits of FP register `reg` with the 64-bit stack slot at
// RSP+`mem` via TMP.
// NOTE(review): the final transfer uses movd; this assumes the assembler's
// movd(XmmRegister, CpuRegister) emits the 64-bit (REX.W) form — confirm in
// the x86-64 assembler implementation.
void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movsd(Address(CpuRegister(RSP), mem), reg);
  __ movd(reg, CpuRegister(TMP));
}
5379
// Emits the swap for the move at `index`, dispatching on the (source,
// destination) location kinds to the matching 32/64-bit exchange helper.
// Unhandled combinations (e.g. involving SIMD stack slots) abort compilation.
void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    Exchange32(destination.GetStackIndex(), source.GetStackIndex());
  } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
    Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
    Exchange64(destination.GetStackIndex(), source.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
    // FP <-> FP swap: park one value in the core TMP register, copy the other
    // across with movaps, then restore the parked value.
    __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
    __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
    __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
  } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
    Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
    Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else {
    LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
  }
}
5415
5416
// Saves a scratch register on the stack so the move resolver may clobber it.
void ParallelMoveResolverX86_64::SpillScratch(int reg) {
  __ pushq(CpuRegister(reg));
}
5420
5421
// Restores a scratch register previously saved by SpillScratch.
void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
  __ popq(CpuRegister(reg));
}
5425
// Emits an initialization check on the class held in `class_reg`: if its
// status is below kStatusInitialized, jump to `slow_path` to run the class
// initializer. The slow path's exit label is bound here so it resumes after
// the check.
void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
    SlowPathCode* slow_path, CpuRegister class_reg) {
  __ cmpl(Address(class_reg, mirror::Class::StatusOffset().Int32Value()),
          Immediate(mirror::Class::kStatusInitialized));
  __ j(kLess, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
  // No need for memory fence, thanks to the x86-64 memory model.
}
5434
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005435HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
5436 HLoadClass::LoadKind desired_class_load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005437 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00005438 case HLoadClass::LoadKind::kInvalid:
5439 LOG(FATAL) << "UNREACHABLE";
5440 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005441 case HLoadClass::LoadKind::kReferrersClass:
5442 break;
5443 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
5444 DCHECK(!GetCompilerOptions().GetCompilePic());
5445 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5446 return HLoadClass::LoadKind::kBootImageLinkTimePcRelative;
5447 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
5448 DCHECK(GetCompilerOptions().GetCompilePic());
5449 break;
5450 case HLoadClass::LoadKind::kBootImageAddress:
5451 break;
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005452 case HLoadClass::LoadKind::kBssEntry:
5453 DCHECK(!Runtime::Current()->UseJitCompilation());
5454 break;
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005455 case HLoadClass::LoadKind::kJitTableAddress:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005456 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005457 break;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005458 case HLoadClass::LoadKind::kDexCacheViaMethod:
5459 break;
5460 }
5461 return desired_class_load_kind;
5462}
5463
// Builds the register-allocation constraints for an HLoadClass.
void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
    // Runtime-call path.
    // Custom calling convention: RAX serves as both input and output.
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
        cls,
        Location::RegisterLocation(RAX),
        Location::RegisterLocation(RAX));
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  // A read barrier (outside the boot image) or an environment forces a slow path.
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }

  if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
    // The referrer's class is loaded from the current ArtMethod, passed in at index 0.
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // Rely on the type resolution and/or initialization to save everything.
      // Custom calling convention: RAX serves as both input and output.
      RegisterSet caller_saves = RegisterSet::Empty();
      caller_saves.Add(Location::RegisterLocation(RAX));
      locations->SetCustomSlowPathCallerSaves(caller_saves);
    } else {
      // For non-Baker read barrier we have a temp-clobbering call.
    }
  }
}
5501
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005502Label* CodeGeneratorX86_64::NewJitRootClassPatch(const DexFile& dex_file,
5503 dex::TypeIndex dex_index,
Nicolas Geoffray5247c082017-01-13 14:17:29 +00005504 Handle<mirror::Class> handle) {
5505 jit_class_roots_.Overwrite(
5506 TypeReference(&dex_file, dex_index), reinterpret_cast64<uint64_t>(handle.GetReference()));
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005507 // Add a patch entry and return the label.
5508 jit_class_patches_.emplace_back(dex_file, dex_index.index_);
5509 PatchInfo<Label>* info = &jit_class_patches_.back();
5510 return &info->label;
5511}
5512
// Generates the code for an HLoadClass according to its load kind, plus the
// null check and/or class-initialization check slow path when required.
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
    // Fallback: resolve the class through a runtime call.
    codegen_->GenerateLoadClassRuntimeCall(cls);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  LocationSummary* locations = cls->GetLocations();
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  // Boot-image classes are never moved by the GC, so no read barrier is needed.
  const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
      ? kWithoutReadBarrier
      : kCompilerReadBarrierOption;
  bool generate_null_check = false;
  switch (load_kind) {
    case HLoadClass::LoadKind::kReferrersClass: {
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
      GenerateGcRootFieldLoad(
          cls,
          out_loc,
          Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()),
          /* fixup_label */ nullptr,
          read_barrier_option);
      break;
    }
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      // Emit a RIP-relative lea with a dummy offset; the linker patches it.
      __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
      codegen_->RecordBootTypePatch(cls);
      break;
    case HLoadClass::LoadKind::kBootImageAddress: {
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      uint32_t address = dchecked_integral_cast<uint32_t>(
          reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
      DCHECK_NE(address, 0u);
      __ movl(out, Immediate(static_cast<int32_t>(address)));  // Zero-extended.
      break;
    }
    case HLoadClass::LoadKind::kBssEntry: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ false);
      Label* fixup_label = codegen_->NewTypeBssEntryPatch(cls);
      // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
      // The .bss entry may not be resolved yet; null means "go to slow path".
      generate_null_check = true;
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ true);
      Label* fixup_label =
          codegen_->NewJitRootClassPatch(cls->GetDexFile(), cls->GetTypeIndex(), cls->GetClass());
      // /* GcRoot<mirror::Class> */ out = *address
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected load kind: " << cls->GetLoadKind();
      UNREACHABLE();
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      // The init check binds the slow path's exit label itself.
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
5598
5599void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
5600 LocationSummary* locations =
5601 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
5602 locations->SetInAt(0, Location::RequiresRegister());
5603 if (check->HasUses()) {
5604 locations->SetOut(Location::SameAsFirstInput());
5605 }
5606}
5607
5608void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005609 // We assume the class to not be null.
Andreas Gampe85b62f22015-09-09 13:15:38 -07005610 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005611 check->GetLoadClass(), check, check->GetDexPc(), true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005612 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00005613 GenerateClassInitializationCheck(slow_path,
5614 check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005615}
5616
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005617HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
5618 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005619 switch (desired_string_load_kind) {
5620 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
5621 DCHECK(!GetCompilerOptions().GetCompilePic());
5622 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5623 return HLoadString::LoadKind::kBootImageLinkTimePcRelative;
5624 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
5625 DCHECK(GetCompilerOptions().GetCompilePic());
5626 break;
5627 case HLoadString::LoadKind::kBootImageAddress:
5628 break;
Vladimir Markoaad75c62016-10-03 08:46:48 +00005629 case HLoadString::LoadKind::kBssEntry:
Calin Juravleffc87072016-04-20 14:22:09 +01005630 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005631 break;
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005632 case HLoadString::LoadKind::kJitTableAddress:
5633 DCHECK(Runtime::Current()->UseJitCompilation());
5634 break;
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005635 case HLoadString::LoadKind::kDexCacheViaMethod:
5636 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005637 }
5638 return desired_string_load_kind;
5639}
5640
// Builds the register-allocation constraints for an HLoadString.
void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
  if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
    // Runtime-call path returns the string in RAX.
    locations->SetOut(Location::RegisterLocation(RAX));
  } else {
    locations->SetOut(Location::RequiresRegister());
    if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
      if (!kUseReadBarrier || kUseBakerReadBarrier) {
        // Rely on the pResolveString to save everything.
        // Custom calling convention: RAX serves as both input and output.
        RegisterSet caller_saves = RegisterSet::Empty();
        caller_saves.Add(Location::RegisterLocation(RAX));
        locations->SetCustomSlowPathCallerSaves(caller_saves);
      } else {
        // For non-Baker read barrier we have a temp-clobbering call.
      }
    }
  }
}
5661
Andreas Gampe8a0128a2016-11-28 07:38:35 -08005662Label* CodeGeneratorX86_64::NewJitRootStringPatch(const DexFile& dex_file,
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00005663 dex::StringIndex dex_index,
5664 Handle<mirror::String> handle) {
5665 jit_string_roots_.Overwrite(
5666 StringReference(&dex_file, dex_index), reinterpret_cast64<uint64_t>(handle.GetReference()));
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005667 // Add a patch entry and return the label.
Andreas Gampe8a0128a2016-11-28 07:38:35 -08005668 jit_string_patches_.emplace_back(dex_file, dex_index.index_);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005669 PatchInfo<Label>* info = &jit_string_patches_.back();
5670 return &info->label;
5671}
5672
// Generates the code for an HLoadString according to its load kind, falling
// back to the pResolveString runtime call for the dex-cache-via-method kind.
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
  LocationSummary* locations = load->GetLocations();
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  switch (load->GetLoadKind()) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      // RIP-relative lea with a dummy offset; the linker patches it.
      __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
      codegen_->RecordBootStringPatch(load);
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kBootImageAddress: {
      uint32_t address = dchecked_integral_cast<uint32_t>(
          reinterpret_cast<uintptr_t>(load->GetString().Get()));
      DCHECK_NE(address, 0u);
      __ movl(out, Immediate(static_cast<int32_t>(address)));  // Zero-extended.
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kBssEntry: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ false);
      Label* fixup_label = codegen_->NewStringBssEntryPatch(load);
      // /* GcRoot<mirror::String> */ out = *address /* PC-relative */
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
      // The .bss entry may still be unresolved (null): resolve on the slow path.
      SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86_64(load);
      codegen_->AddSlowPath(slow_path);
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
    case HLoadString::LoadKind::kJitTableAddress: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ true);
      Label* fixup_label = codegen_->NewJitRootStringPatch(
          load->GetDexFile(), load->GetStringIndex(), load->GetString());
      // /* GcRoot<mirror::String> */ out = *address
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
      return;
    }
    default:
      break;
  }

  // TODO: Re-add the compiler code to do string dex cache lookup again.
  // Custom calling convention: RAX serves as both input and output.
  __ movl(CpuRegister(RAX), Immediate(load->GetStringIndex().index_));
  codegen_->InvokeRuntime(kQuickResolveString,
                          load,
                          load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}
5728
// Returns the thread-local address of the current thread's pending-exception
// field; intended to be used with the gs() segment override.
static Address GetExceptionTlsAddress() {
  return Address::Absolute(Thread::ExceptionOffset<kX86_64PointerSize>().Int32Value(),
                           /* no_rip */ true);
}
5733
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005734void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
5735 LocationSummary* locations =
5736 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
5737 locations->SetOut(Location::RequiresRegister());
5738}
5739
// Reads the pending exception reference from the thread-local slot (GS-relative).
void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
  __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
}
5743
// Clearing the exception needs no registers and makes no calls.
void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}
5747
// Clears the pending exception by storing a null reference into the thread-local slot.
void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
}
5751
5752void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
5753 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005754 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005755 InvokeRuntimeCallingConvention calling_convention;
5756 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5757}
5758
// Delegates throwing to the pDeliverException runtime entrypoint.
void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
5763
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00005764static bool CheckCastTypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5765 if (type_check_kind == TypeCheckKind::kInterfaceCheck && !kPoisonHeapReferences) {
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07005766 // We need a temporary for holding the iftable length.
5767 return true;
5768 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005769 return kEmitCompilerReadBarrier &&
Vladimir Marko953437b2016-08-24 08:30:46 +00005770 !kUseBakerReadBarrier &&
5771 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005772 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5773 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5774}
5775
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00005776static bool InstanceOfTypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5777 return kEmitCompilerReadBarrier &&
5778 !kUseBakerReadBarrier &&
5779 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5780 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5781 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5782}
5783
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005784void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005785 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain0d5a2812015-11-13 10:07:31 +00005786 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01005787 bool baker_read_barrier_slow_path = false;
Roland Levillain0d5a2812015-11-13 10:07:31 +00005788 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005789 case TypeCheckKind::kExactCheck:
5790 case TypeCheckKind::kAbstractClassCheck:
5791 case TypeCheckKind::kClassHierarchyCheck:
5792 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005793 call_kind =
5794 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Vladimir Marko70e97462016-08-09 11:04:26 +01005795 baker_read_barrier_slow_path = kUseBakerReadBarrier;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005796 break;
5797 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005798 case TypeCheckKind::kUnresolvedCheck:
5799 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005800 call_kind = LocationSummary::kCallOnSlowPath;
5801 break;
5802 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005803
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005804 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01005805 if (baker_read_barrier_slow_path) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005806 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01005807 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005808 locations->SetInAt(0, Location::RequiresRegister());
5809 locations->SetInAt(1, Location::Any());
5810 // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
5811 locations->SetOut(Location::RequiresRegister());
5812 // When read barriers are enabled, we need a temporary register for
5813 // some cases.
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00005814 if (InstanceOfTypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005815 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005816 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005817}
5818
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005819void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005820 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005821 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005822 Location obj_loc = locations->InAt(0);
5823 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005824 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005825 Location out_loc = locations->Out();
5826 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00005827 Location maybe_temp_loc = InstanceOfTypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005828 locations->GetTemp(0) :
5829 Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005830 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005831 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5832 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5833 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07005834 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005835 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005836
5837 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005838 // Avoid null check if we know obj is not null.
5839 if (instruction->MustDoNullCheck()) {
5840 __ testl(obj, obj);
5841 __ j(kEqual, &zero);
5842 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005843
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005844 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005845 case TypeCheckKind::kExactCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08005846 // /* HeapReference<Class> */ out = obj->klass_
5847 GenerateReferenceLoadTwoRegisters(instruction,
5848 out_loc,
5849 obj_loc,
5850 class_offset,
5851 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005852 if (cls.IsRegister()) {
5853 __ cmpl(out, cls.AsRegister<CpuRegister>());
5854 } else {
5855 DCHECK(cls.IsStackSlot()) << cls;
5856 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5857 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005858 if (zero.IsLinked()) {
5859 // Classes must be equal for the instanceof to succeed.
5860 __ j(kNotEqual, &zero);
5861 __ movl(out, Immediate(1));
5862 __ jmp(&done);
5863 } else {
5864 __ setcc(kEqual, out);
5865 // setcc only sets the low byte.
5866 __ andl(out, Immediate(1));
5867 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005868 break;
5869 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005870
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005871 case TypeCheckKind::kAbstractClassCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08005872 // /* HeapReference<Class> */ out = obj->klass_
5873 GenerateReferenceLoadTwoRegisters(instruction,
5874 out_loc,
5875 obj_loc,
5876 class_offset,
5877 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005878 // If the class is abstract, we eagerly fetch the super class of the
5879 // object to avoid doing a comparison we know will fail.
5880 NearLabel loop, success;
5881 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005882 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08005883 GenerateReferenceLoadOneRegister(instruction,
5884 out_loc,
5885 super_offset,
5886 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005887 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005888 __ testl(out, out);
5889 // If `out` is null, we use it for the result, and jump to `done`.
5890 __ j(kEqual, &done);
5891 if (cls.IsRegister()) {
5892 __ cmpl(out, cls.AsRegister<CpuRegister>());
5893 } else {
5894 DCHECK(cls.IsStackSlot()) << cls;
5895 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5896 }
5897 __ j(kNotEqual, &loop);
5898 __ movl(out, Immediate(1));
5899 if (zero.IsLinked()) {
5900 __ jmp(&done);
5901 }
5902 break;
5903 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005904
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005905 case TypeCheckKind::kClassHierarchyCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08005906 // /* HeapReference<Class> */ out = obj->klass_
5907 GenerateReferenceLoadTwoRegisters(instruction,
5908 out_loc,
5909 obj_loc,
5910 class_offset,
5911 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005912 // Walk over the class hierarchy to find a match.
5913 NearLabel loop, success;
5914 __ Bind(&loop);
5915 if (cls.IsRegister()) {
5916 __ cmpl(out, cls.AsRegister<CpuRegister>());
5917 } else {
5918 DCHECK(cls.IsStackSlot()) << cls;
5919 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5920 }
5921 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005922 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08005923 GenerateReferenceLoadOneRegister(instruction,
5924 out_loc,
5925 super_offset,
5926 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005927 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005928 __ testl(out, out);
5929 __ j(kNotEqual, &loop);
5930 // If `out` is null, we use it for the result, and jump to `done`.
5931 __ jmp(&done);
5932 __ Bind(&success);
5933 __ movl(out, Immediate(1));
5934 if (zero.IsLinked()) {
5935 __ jmp(&done);
5936 }
5937 break;
5938 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005939
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005940 case TypeCheckKind::kArrayObjectCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08005941 // /* HeapReference<Class> */ out = obj->klass_
5942 GenerateReferenceLoadTwoRegisters(instruction,
5943 out_loc,
5944 obj_loc,
5945 class_offset,
5946 kCompilerReadBarrierOption);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005947 // Do an exact check.
5948 NearLabel exact_check;
5949 if (cls.IsRegister()) {
5950 __ cmpl(out, cls.AsRegister<CpuRegister>());
5951 } else {
5952 DCHECK(cls.IsStackSlot()) << cls;
5953 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5954 }
5955 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005956 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005957 // /* HeapReference<Class> */ out = out->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08005958 GenerateReferenceLoadOneRegister(instruction,
5959 out_loc,
5960 component_offset,
5961 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005962 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005963 __ testl(out, out);
5964 // If `out` is null, we use it for the result, and jump to `done`.
5965 __ j(kEqual, &done);
5966 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
5967 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005968 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005969 __ movl(out, Immediate(1));
5970 __ jmp(&done);
5971 break;
5972 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005973
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005974 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08005975 // No read barrier since the slow path will retry upon failure.
5976 // /* HeapReference<Class> */ out = obj->klass_
5977 GenerateReferenceLoadTwoRegisters(instruction,
5978 out_loc,
5979 obj_loc,
5980 class_offset,
5981 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005982 if (cls.IsRegister()) {
5983 __ cmpl(out, cls.AsRegister<CpuRegister>());
5984 } else {
5985 DCHECK(cls.IsStackSlot()) << cls;
5986 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5987 }
5988 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain0d5a2812015-11-13 10:07:31 +00005989 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5990 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005991 codegen_->AddSlowPath(slow_path);
5992 __ j(kNotEqual, slow_path->GetEntryLabel());
5993 __ movl(out, Immediate(1));
5994 if (zero.IsLinked()) {
5995 __ jmp(&done);
5996 }
5997 break;
5998 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005999
Calin Juravle98893e12015-10-02 21:05:03 +01006000 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006001 case TypeCheckKind::kInterfaceCheck: {
6002 // Note that we indeed only call on slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006003 // into the slow path for the unresolved and interface check
Roland Levillain0d5a2812015-11-13 10:07:31 +00006004 // cases.
6005 //
6006 // We cannot directly call the InstanceofNonTrivial runtime
6007 // entry point without resorting to a type checking slow path
6008 // here (i.e. by calling InvokeRuntime directly), as it would
6009 // require to assign fixed registers for the inputs of this
6010 // HInstanceOf instruction (following the runtime calling
6011 // convention), which might be cluttered by the potential first
6012 // read barrier emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006013 //
6014 // TODO: Introduce a new runtime entry point taking the object
6015 // to test (instead of its class) as argument, and let it deal
6016 // with the read barrier issues. This will let us refactor this
6017 // case of the `switch` code as it was previously (with a direct
6018 // call to the runtime not using a type checking slow path).
6019 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006020 DCHECK(locations->OnlyCallsOnSlowPath());
6021 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
6022 /* is_fatal */ false);
6023 codegen_->AddSlowPath(slow_path);
6024 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006025 if (zero.IsLinked()) {
6026 __ jmp(&done);
6027 }
6028 break;
6029 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006030 }
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006031
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006032 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006033 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006034 __ xorl(out, out);
6035 }
6036
6037 if (done.IsLinked()) {
6038 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006039 }
6040
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006041 if (slow_path != nullptr) {
6042 __ Bind(slow_path->GetExitLabel());
6043 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006044}
6045
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006046static bool IsTypeCheckSlowPathFatal(TypeCheckKind type_check_kind, bool throws_into_catch) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006047 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006048 case TypeCheckKind::kExactCheck:
6049 case TypeCheckKind::kAbstractClassCheck:
6050 case TypeCheckKind::kClassHierarchyCheck:
6051 case TypeCheckKind::kArrayObjectCheck:
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006052 return !throws_into_catch && !kEmitCompilerReadBarrier;
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00006053 case TypeCheckKind::kInterfaceCheck:
6054 return !throws_into_catch && !kEmitCompilerReadBarrier && !kPoisonHeapReferences;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006055 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006056 case TypeCheckKind::kUnresolvedCheck:
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006057 return false;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006058 }
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006059 LOG(FATAL) << "Unreachable";
6060 UNREACHABLE();
6061}
6062
6063void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
6064 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
6065 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
6066 bool is_fatal_slow_path = IsTypeCheckSlowPathFatal(type_check_kind, throws_into_catch);
6067 LocationSummary::CallKind call_kind = is_fatal_slow_path
6068 ? LocationSummary::kNoCall
6069 : LocationSummary::kCallOnSlowPath;
Roland Levillain0d5a2812015-11-13 10:07:31 +00006070 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
6071 locations->SetInAt(0, Location::RequiresRegister());
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006072 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
6073 // Require a register for the interface check since there is a loop that compares the class to
6074 // a memory address.
6075 locations->SetInAt(1, Location::RequiresRegister());
6076 } else {
6077 locations->SetInAt(1, Location::Any());
6078 }
6079
Roland Levillain0d5a2812015-11-13 10:07:31 +00006080 // Note that TypeCheckSlowPathX86_64 uses this "temp" register too.
6081 locations->AddTemp(Location::RequiresRegister());
6082 // When read barriers are enabled, we need an additional temporary
6083 // register for some cases.
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00006084 if (CheckCastTypeCheckNeedsATemporary(type_check_kind)) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006085 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006086 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006087}
6088
6089void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006090 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006091 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006092 Location obj_loc = locations->InAt(0);
6093 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006094 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006095 Location temp_loc = locations->GetTemp(0);
6096 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00006097 Location maybe_temp2_loc = CheckCastTypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006098 locations->GetTemp(1) :
6099 Location::NoLocation();
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006100 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
6101 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
6102 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
6103 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
6104 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
6105 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006106 const uint32_t object_array_data_offset =
6107 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006108
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006109 // Always false for read barriers since we may need to go to the entrypoint for non-fatal cases
6110 // from false negatives. The false negatives may come from avoiding read barriers below. Avoiding
6111 // read barriers is done for performance and code size reasons.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006112 bool is_type_check_slow_path_fatal =
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006113 IsTypeCheckSlowPathFatal(type_check_kind, instruction->CanThrowIntoCatchBlock());
Roland Levillain0d5a2812015-11-13 10:07:31 +00006114 SlowPathCode* type_check_slow_path =
6115 new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
6116 is_type_check_slow_path_fatal);
6117 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006118
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006119
6120 NearLabel done;
6121 // Avoid null check if we know obj is not null.
6122 if (instruction->MustDoNullCheck()) {
6123 __ testl(obj, obj);
6124 __ j(kEqual, &done);
6125 }
6126
Roland Levillain0d5a2812015-11-13 10:07:31 +00006127 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006128 case TypeCheckKind::kExactCheck:
6129 case TypeCheckKind::kArrayCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006130 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006131 GenerateReferenceLoadTwoRegisters(instruction,
6132 temp_loc,
6133 obj_loc,
6134 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006135 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006136 if (cls.IsRegister()) {
6137 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6138 } else {
6139 DCHECK(cls.IsStackSlot()) << cls;
6140 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6141 }
6142 // Jump to slow path for throwing the exception or doing a
6143 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006144 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006145 break;
6146 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006147
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006148 case TypeCheckKind::kAbstractClassCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006149 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006150 GenerateReferenceLoadTwoRegisters(instruction,
6151 temp_loc,
6152 obj_loc,
6153 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006154 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006155 // If the class is abstract, we eagerly fetch the super class of the
6156 // object to avoid doing a comparison we know will fail.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006157 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006158 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006159 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006160 GenerateReferenceLoadOneRegister(instruction,
6161 temp_loc,
6162 super_offset,
6163 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006164 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006165
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006166 // If the class reference currently in `temp` is null, jump to the slow path to throw the
6167 // exception.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006168 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006169 // Otherwise, compare the classes.
6170 __ j(kZero, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006171 if (cls.IsRegister()) {
6172 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6173 } else {
6174 DCHECK(cls.IsStackSlot()) << cls;
6175 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6176 }
6177 __ j(kNotEqual, &loop);
6178 break;
6179 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006180
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006181 case TypeCheckKind::kClassHierarchyCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006182 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006183 GenerateReferenceLoadTwoRegisters(instruction,
6184 temp_loc,
6185 obj_loc,
6186 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006187 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006188 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006189 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006190 __ Bind(&loop);
6191 if (cls.IsRegister()) {
6192 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6193 } else {
6194 DCHECK(cls.IsStackSlot()) << cls;
6195 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6196 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006197 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006198
Roland Levillain0d5a2812015-11-13 10:07:31 +00006199 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006200 GenerateReferenceLoadOneRegister(instruction,
6201 temp_loc,
6202 super_offset,
6203 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006204 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006205
6206 // If the class reference currently in `temp` is not null, jump
6207 // back at the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006208 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006209 __ j(kNotZero, &loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006210 // Otherwise, jump to the slow path to throw the exception.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006211 __ jmp(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006212 break;
6213 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006214
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006215 case TypeCheckKind::kArrayObjectCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006216 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006217 GenerateReferenceLoadTwoRegisters(instruction,
6218 temp_loc,
6219 obj_loc,
6220 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006221 kWithoutReadBarrier);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006222 // Do an exact check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006223 NearLabel check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006224 if (cls.IsRegister()) {
6225 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6226 } else {
6227 DCHECK(cls.IsStackSlot()) << cls;
6228 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6229 }
6230 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006231
6232 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006233 // /* HeapReference<Class> */ temp = temp->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006234 GenerateReferenceLoadOneRegister(instruction,
6235 temp_loc,
6236 component_offset,
6237 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006238 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006239
6240 // If the component type is not null (i.e. the object is indeed
6241 // an array), jump to label `check_non_primitive_component_type`
6242 // to further check that this component type is not a primitive
6243 // type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006244 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006245 // Otherwise, jump to the slow path to throw the exception.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006246 __ j(kZero, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006247 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006248 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006249 break;
6250 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006251
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006252 case TypeCheckKind::kUnresolvedCheck: {
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006253 // We always go into the type check slow path for the unresolved case.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006254 //
6255 // We cannot directly call the CheckCast runtime entry point
6256 // without resorting to a type checking slow path here (i.e. by
6257 // calling InvokeRuntime directly), as it would require to
6258 // assign fixed registers for the inputs of this HInstanceOf
6259 // instruction (following the runtime calling convention), which
6260 // might be cluttered by the potential first read barrier
6261 // emission at the beginning of this method.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006262 __ jmp(type_check_slow_path->GetEntryLabel());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006263 break;
6264 }
6265
6266 case TypeCheckKind::kInterfaceCheck:
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006267 // Fast path for the interface check. We always go slow path for heap poisoning since
6268 // unpoisoning cls would require an extra temp.
6269 if (!kPoisonHeapReferences) {
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006270 // Try to avoid read barriers to improve the fast path. We can not get false positives by
6271 // doing this.
6272 // /* HeapReference<Class> */ temp = obj->klass_
6273 GenerateReferenceLoadTwoRegisters(instruction,
6274 temp_loc,
6275 obj_loc,
6276 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006277 kWithoutReadBarrier);
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006278
6279 // /* HeapReference<Class> */ temp = temp->iftable_
6280 GenerateReferenceLoadTwoRegisters(instruction,
6281 temp_loc,
6282 temp_loc,
6283 iftable_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006284 kWithoutReadBarrier);
Mathieu Chartier6beced42016-11-15 15:51:31 -08006285 // Iftable is never null.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006286 __ movl(maybe_temp2_loc.AsRegister<CpuRegister>(), Address(temp, array_length_offset));
Mathieu Chartier6beced42016-11-15 15:51:31 -08006287 // Loop through the iftable and check if any class matches.
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006288 NearLabel start_loop;
6289 __ Bind(&start_loop);
Mathieu Chartier6beced42016-11-15 15:51:31 -08006290 // Need to subtract first to handle the empty array case.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006291 __ subl(maybe_temp2_loc.AsRegister<CpuRegister>(), Immediate(2));
Mathieu Chartier6beced42016-11-15 15:51:31 -08006292 __ j(kNegative, type_check_slow_path->GetEntryLabel());
6293 // Go to next interface if the classes do not match.
6294 __ cmpl(cls.AsRegister<CpuRegister>(),
6295 CodeGeneratorX86_64::ArrayAddress(temp,
6296 maybe_temp2_loc,
6297 TIMES_4,
6298 object_array_data_offset));
6299 __ j(kNotEqual, &start_loop); // Return if same class.
6300 } else {
6301 __ jmp(type_check_slow_path->GetEntryLabel());
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006302 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006303 break;
6304 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006305
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006306 if (done.IsLinked()) {
6307 __ Bind(&done);
6308 }
6309
Roland Levillain0d5a2812015-11-13 10:07:31 +00006310 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006311}
6312
// Monitor enter/exit is implemented as a runtime call on the main path, so
// the object to lock or unlock (input 0) must be in the first register of
// the runtime calling convention.
void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
6319
6320void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01006321 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexandre Rames8158f282015-08-07 10:26:17 +01006322 instruction,
Serban Constantinescuba45db02016-07-12 22:53:02 +01006323 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006324 if (instruction->IsEnter()) {
6325 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6326 } else {
6327 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6328 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006329}
6330
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006331void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
6332void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
6333void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
6334
6335void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6336 LocationSummary* locations =
6337 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
6338 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
6339 || instruction->GetResultType() == Primitive::kPrimLong);
6340 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04006341 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006342 locations->SetOut(Location::SameAsFirstInput());
6343}
6344
6345void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
6346 HandleBitwiseOperation(instruction);
6347}
6348
6349void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
6350 HandleBitwiseOperation(instruction);
6351}
6352
6353void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
6354 HandleBitwiseOperation(instruction);
6355}
6356
// Emits the actual and/or/xor instruction for an HAnd/HOr/HXor. The result
// is produced in place in the first input's register (SameAsFirstInput),
// using 32-bit forms for int and 64-bit forms for long. The second operand
// may live in a register, be a constant, or sit on the stack.
void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  // Two-address form: output must alias the first input.
  DCHECK(first.Equals(locations->Out()));

  if (instruction->GetResultType() == Primitive::kPrimInt) {
    // 32-bit case: reg/reg, reg/imm, or reg/mem (stack slot) encodings.
    if (second.IsRegister()) {
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      }
    } else if (second.IsConstant()) {
      Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), imm);
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), imm);
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), imm);
      }
    } else {
      // Second operand is spilled: operate directly on its stack slot.
      Address address(CpuRegister(RSP), second.GetStackIndex());
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), address);
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), address);
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), address);
      }
    }
  } else {
    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
    CpuRegister first_reg = first.AsRegister<CpuRegister>();
    bool second_is_constant = false;
    int64_t value = 0;
    if (second.IsConstant()) {
      second_is_constant = true;
      value = second.GetConstant()->AsLongConstant()->GetValue();
    }
    // 64-bit immediates only encode when they fit in a sign-extended imm32;
    // otherwise the constant is read from the RIP-relative constant area.
    bool is_int32_value = IsInt<32>(value);

    if (instruction->IsAnd()) {
      if (second_is_constant) {
        if (is_int32_value) {
          __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ andq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ andq(first_reg, second.AsRegister<CpuRegister>());
      }
    } else if (instruction->IsOr()) {
      if (second_is_constant) {
        if (is_int32_value) {
          __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ orq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ orq(first_reg, second.AsRegister<CpuRegister>());
      }
    } else {
      DCHECK(instruction->IsXor());
      if (second_is_constant) {
        if (is_int32_value) {
          __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ xorq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ xorq(first_reg, second.AsRegister<CpuRegister>());
      }
    }
  }
}
6445
// Emits `out = *(out + offset)` for a heap reference, clobbering the base
// register with the loaded value, with an optional read barrier.
// `maybe_temp` is only consumed on the non-Baker read-barrier path, where
// the original base value must be preserved for the slow path.
void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(
    HInstruction* instruction,
    Location out,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  CpuRegister out_reg = out.AsRegister<CpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, out_reg, offset, /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ movl(out_reg, Address(out_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ movl(out_reg, Address(out_reg, offset));
    // Heap references may be poisoned; undo that on a plain load.
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
6478
// Emits `out = *(obj + offset)` for a heap reference, leaving the base
// register `obj` intact (distinct source and destination registers), with
// an optional read barrier.
void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    ReadBarrierOption read_barrier_option) {
  CpuRegister out_reg = out.AsRegister<CpuRegister>();
  CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, obj_reg, offset, /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // No temp needed here: `obj` still holds the base for the slow path.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ movl(out_reg, Address(obj_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ movl(out_reg, Address(obj_reg, offset));
    // Heap references may be poisoned; undo that on a plain load.
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
6507
// Loads a GC root (a compressed reference) from `address` into `root`,
// with an optional read barrier. If `fixup_label` is non-null it is bound
// immediately after the load instruction so later patching can locate it.
void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(
    HInstruction* instruction,
    Location root,
    const Address& address,
    Label* fixup_label,
    ReadBarrierOption read_barrier_option) {
  CpuRegister root_reg = root.AsRegister<CpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barrier are used:
      //
      //   root = obj.field;
      //   temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
      //   if (temp != null) {
      //     root = temp(root)
      //   }

      // /* GcRoot<mirror::Object> */ root = *address
      __ movl(root_reg, address);
      if (fixup_label != nullptr) {
        // Bound right after the movl so the patching code can find it.
        __ Bind(fixup_label);
      }
      // The 32-bit movl above is only valid because compressed references,
      // GC roots, and int32_t all have the same size.
      static_assert(
          sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
          "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
          "have different sizes.");
      static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                    "art::mirror::CompressedReference<mirror::Object> and int32_t "
                    "have different sizes.");

      // Slow path marking the GC root `root`.
      SlowPathCode* slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(
          instruction, root, /* unpoison_ref_before_marking */ false);
      codegen_->AddSlowPath(slow_path);

      // Test the `Thread::Current()->pReadBarrierMarkReg ## root.reg()` entrypoint.
      const int32_t entry_point_offset =
          CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(root.reg());
      __ gs()->cmpl(Address::Absolute(entry_point_offset, /* no_rip */ true), Immediate(0));
      // The entrypoint is null when the GC is not marking.
      __ j(kNotEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = address
      __ leaq(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *address
    __ movl(root_reg, address);
    if (fixup_label != nullptr) {
      __ Bind(fixup_label);
    }
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}
6574
6575void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6576 Location ref,
6577 CpuRegister obj,
6578 uint32_t offset,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006579 bool needs_null_check) {
6580 DCHECK(kEmitCompilerReadBarrier);
6581 DCHECK(kUseBakerReadBarrier);
6582
6583 // /* HeapReference<Object> */ ref = *(obj + offset)
6584 Address src(obj, offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00006585 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006586}
6587
6588void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6589 Location ref,
6590 CpuRegister obj,
6591 uint32_t data_offset,
6592 Location index,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006593 bool needs_null_check) {
6594 DCHECK(kEmitCompilerReadBarrier);
6595 DCHECK(kUseBakerReadBarrier);
6596
Roland Levillain3d312422016-06-23 13:53:42 +01006597 static_assert(
6598 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
6599 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006600 // /* HeapReference<Object> */ ref =
6601 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006602 Address src = CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00006603 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006604}
6605
// Core Baker read barrier sequence: tests the object's gray bit, performs
// the reference load from `src` into `ref`, and dispatches to a marking
// slow path when the object is gray. When `always_update_field` is set the
// slow path also writes the marked reference back through `src` (requiring
// `temp1`/`temp2`). Flag liveness between the testb and the branch is the
// key invariant here; do not reorder instructions in this sequence.
void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                    Location ref,
                                                                    CpuRegister obj,
                                                                    const Address& src,
                                                                    bool needs_null_check,
                                                                    bool always_update_field,
                                                                    CpuRegister* temp1,
                                                                    CpuRegister* temp2) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::GrayState());
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as:
  // - it implements the load-load fence using a data dependency on
  //   the high-bits of rb_state, which are expected to be all zeroes
  //   (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
  //   here, which is a no-op thanks to the x86-64 memory model);
  // - it performs additional checks that we do not do here for
  //   performance reasons.

  CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  // Locate the byte and bit of the lock word holding the read barrier state.
  constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
  constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
  constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);

  // if (rb_state == ReadBarrier::GrayState())
  //   ref = ReadBarrier::Mark(ref);
  // At this point, just do the "if" and make sure that flags are preserved until the branch.
  __ testb(Address(obj, monitor_offset + gray_byte_position), Immediate(test_value));
  if (needs_null_check) {
    // The testb above is the first access to `obj`, so it can double as the
    // implicit null check.
    MaybeRecordImplicitNullCheck(instruction);
  }

  // Load fence to prevent load-load reordering.
  // Note that this is a no-op, thanks to the x86-64 memory model.
  GenerateMemoryBarrier(MemBarrierKind::kLoadAny);

  // The actual reference load.
  // /* HeapReference<Object> */ ref = *src
  __ movl(ref_reg, src);  // Flags are unaffected.

  // Note: Reference unpoisoning modifies the flags, so we need to delay it after the branch.
  // Slow path marking the object `ref` when it is gray.
  SlowPathCode* slow_path;
  if (always_update_field) {
    DCHECK(temp1 != nullptr);
    DCHECK(temp2 != nullptr);
    slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkAndUpdateFieldSlowPathX86_64(
        instruction, ref, obj, src, /* unpoison_ref_before_marking */ true, *temp1, *temp2);
  } else {
    slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(
        instruction, ref, /* unpoison_ref_before_marking */ true);
  }
  AddSlowPath(slow_path);

  // We have done the "if" of the gray bit check above, now branch based on the flags.
  __ j(kNotZero, slow_path->GetEntryLabel());

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  __ Bind(slow_path->GetExitLabel());
}
6689
6690void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
6691 Location out,
6692 Location ref,
6693 Location obj,
6694 uint32_t offset,
6695 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006696 DCHECK(kEmitCompilerReadBarrier);
6697
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006698 // Insert a slow path based read barrier *after* the reference load.
6699 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006700 // If heap poisoning is enabled, the unpoisoning of the loaded
6701 // reference will be carried out by the runtime within the slow
6702 // path.
6703 //
6704 // Note that `ref` currently does not get unpoisoned (when heap
6705 // poisoning is enabled), which is alright as the `ref` argument is
6706 // not used by the artReadBarrierSlow entry point.
6707 //
6708 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
6709 SlowPathCode* slow_path = new (GetGraph()->GetArena())
6710 ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
6711 AddSlowPath(slow_path);
6712
Roland Levillain0d5a2812015-11-13 10:07:31 +00006713 __ jmp(slow_path->GetEntryLabel());
6714 __ Bind(slow_path->GetExitLabel());
6715}
6716
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006717void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6718 Location out,
6719 Location ref,
6720 Location obj,
6721 uint32_t offset,
6722 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006723 if (kEmitCompilerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006724 // Baker's read barriers shall be handled by the fast path
6725 // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
6726 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006727 // If heap poisoning is enabled, unpoisoning will be taken care of
6728 // by the runtime within the slow path.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006729 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006730 } else if (kPoisonHeapReferences) {
6731 __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
6732 }
6733}
6734
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006735void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
6736 Location out,
6737 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006738 DCHECK(kEmitCompilerReadBarrier);
6739
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006740 // Insert a slow path based read barrier *after* the GC root load.
6741 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006742 // Note that GC roots are not affected by heap poisoning, so we do
6743 // not need to do anything special for this here.
6744 SlowPathCode* slow_path =
6745 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
6746 AddSlowPath(slow_path);
6747
Roland Levillain0d5a2812015-11-13 10:07:31 +00006748 __ jmp(slow_path->GetEntryLabel());
6749 __ Bind(slow_path->GetExitLabel());
6750}
6751
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006752void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006753 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00006754 LOG(FATAL) << "Unreachable";
6755}
6756
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006757void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006758 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00006759 LOG(FATAL) << "Unreachable";
6760}
6761
Mark Mendellfe57faa2015-09-18 09:26:15 -04006762// Simple implementation of packed switch - generate cascaded compare/jumps.
6763void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6764 LocationSummary* locations =
6765 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
6766 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell9c86b482015-09-18 13:36:07 -04006767 locations->AddTemp(Location::RequiresRegister());
6768 locations->AddTemp(Location::RequiresRegister());
Mark Mendellfe57faa2015-09-18 09:26:15 -04006769}
6770
// Emits a packed switch: for few entries, a cascade of compare/jump pairs
// (two cases tested per cmpl); for many entries, a jump through a
// PC-relative table of signed offsets stored in the constant area.
void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Should we generate smaller inline compare/jumps?
  if (num_entries <= kPackedSwitchJumpTableThreshold) {
    // Figure out the correct compare values and jump conditions.
    // Handle the first compare/branch as a special case because it might
    // jump to the default case.
    DCHECK_GT(num_entries, 2u);
    Condition first_condition;
    uint32_t index;
    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    if (lower_bound != 0) {
      // Signed compare: anything below the lower bound goes to default;
      // equality hits the first case.
      first_condition = kLess;
      __ cmpl(value_reg_in, Immediate(lower_bound));
      __ j(first_condition, codegen_->GetLabelOf(default_block));
      __ j(kEqual, codegen_->GetLabelOf(successors[0]));

      index = 1;
    } else {
      // Handle all the compare/jumps below.
      // With a zero lower bound, an unsigned below-compare also rejects
      // negative values, so no separate range check is needed.
      first_condition = kBelow;
      index = 0;
    }

    // Handle the rest of the compare/jumps.
    // Each cmpl settles two consecutive cases: `<` hits case[index],
    // `==` hits case[index + 1].
    for (; index + 1 < num_entries; index += 2) {
      int32_t compare_to_value = lower_bound + index + 1;
      __ cmpl(value_reg_in, Immediate(compare_to_value));
      // Jump to successors[index] if value < case_value[index].
      __ j(first_condition, codegen_->GetLabelOf(successors[index]));
      // Jump to successors[index + 1] if value == case_value[index + 1].
      __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
    }

    if (index != num_entries) {
      // There are an odd number of entries. Handle the last one.
      DCHECK_EQ(index + 1, num_entries);
      __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
      __ j(kEqual, codegen_->GetLabelOf(successors[index]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ jmp(codegen_->GetLabelOf(default_block));
    }
    return;
  }

  // Jump-table form below.
  // Remove the bias, if needed.
  Register value_reg_out = value_reg_in.AsRegister();
  if (lower_bound != 0) {
    __ leal(temp_reg, Address(value_reg_in, -lower_bound));
    value_reg_out = temp_reg.AsRegister();
  }
  CpuRegister value_reg(value_reg_out);

  // Is the value in range? (unsigned compare also catches negatives)
  __ cmpl(value_reg, Immediate(num_entries - 1));
  __ j(kAbove, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the address of the jump table in the constant area.
  __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));

  // Load the (signed) offset from the jump table.
  __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));

  // Add the offset to the address of the table base.
  __ addq(temp_reg, base_reg);

  // And jump.
  __ jmp(temp_reg);
}
6851
Aart Bikc5d47542016-01-27 17:00:35 -08006852void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
6853 if (value == 0) {
6854 __ xorl(dest, dest);
6855 } else {
6856 __ movl(dest, Immediate(value));
6857 }
6858}
6859
Mark Mendell92e83bf2015-05-07 11:25:03 -04006860void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
6861 if (value == 0) {
Aart Bikc5d47542016-01-27 17:00:35 -08006862 // Clears upper bits too.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006863 __ xorl(dest, dest);
Vladimir Markoed009782016-02-22 16:54:39 +00006864 } else if (IsUint<32>(value)) {
6865 // We can use a 32 bit move, as it will zero-extend and is shorter.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006866 __ movl(dest, Immediate(static_cast<int32_t>(value)));
6867 } else {
6868 __ movq(dest, Immediate(value));
6869 }
6870}
6871
Mark Mendell7c0b44f2016-02-01 10:08:35 -05006872void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
6873 if (value == 0) {
6874 __ xorps(dest, dest);
6875 } else {
6876 __ movss(dest, LiteralInt32Address(value));
6877 }
6878}
6879
6880void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
6881 if (value == 0) {
6882 __ xorpd(dest, dest);
6883 } else {
6884 __ movsd(dest, LiteralInt64Address(value));
6885 }
6886}
6887
6888void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
6889 Load32BitValue(dest, bit_cast<int32_t, float>(value));
6890}
6891
6892void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
6893 Load64BitValue(dest, bit_cast<int64_t, double>(value));
6894}
6895
Aart Bika19616e2016-02-01 18:57:58 -08006896void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
6897 if (value == 0) {
6898 __ testl(dest, dest);
6899 } else {
6900 __ cmpl(dest, Immediate(value));
6901 }
6902}
6903
6904void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
6905 if (IsInt<32>(value)) {
6906 if (value == 0) {
6907 __ testq(dest, dest);
6908 } else {
6909 __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
6910 }
6911 } else {
6912 // Value won't fit in an int.
6913 __ cmpq(dest, LiteralInt64Address(value));
6914 }
6915}
6916
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006917void CodeGeneratorX86_64::GenerateIntCompare(Location lhs, Location rhs) {
6918 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
jessicahandojo4877b792016-09-08 19:49:13 -07006919 GenerateIntCompare(lhs_reg, rhs);
6920}
6921
6922void CodeGeneratorX86_64::GenerateIntCompare(CpuRegister lhs, Location rhs) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006923 if (rhs.IsConstant()) {
6924 int32_t value = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
jessicahandojo4877b792016-09-08 19:49:13 -07006925 Compare32BitValue(lhs, value);
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006926 } else if (rhs.IsStackSlot()) {
jessicahandojo4877b792016-09-08 19:49:13 -07006927 __ cmpl(lhs, Address(CpuRegister(RSP), rhs.GetStackIndex()));
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006928 } else {
jessicahandojo4877b792016-09-08 19:49:13 -07006929 __ cmpl(lhs, rhs.AsRegister<CpuRegister>());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006930 }
6931}
6932
6933void CodeGeneratorX86_64::GenerateLongCompare(Location lhs, Location rhs) {
6934 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
6935 if (rhs.IsConstant()) {
6936 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
6937 Compare64BitValue(lhs_reg, value);
6938 } else if (rhs.IsDoubleStackSlot()) {
6939 __ cmpq(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
6940 } else {
6941 __ cmpq(lhs_reg, rhs.AsRegister<CpuRegister>());
6942 }
6943}
6944
6945Address CodeGeneratorX86_64::ArrayAddress(CpuRegister obj,
6946 Location index,
6947 ScaleFactor scale,
6948 uint32_t data_offset) {
6949 return index.IsConstant() ?
6950 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << scale) + data_offset) :
6951 Address(obj, index.AsRegister<CpuRegister>(), scale, data_offset);
6952}
6953
Mark Mendellcfa410b2015-05-25 16:02:44 -04006954void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
6955 DCHECK(dest.IsDoubleStackSlot());
6956 if (IsInt<32>(value)) {
6957 // Can move directly as an int32 constant.
6958 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
6959 Immediate(static_cast<int32_t>(value)));
6960 } else {
6961 Load64BitValue(CpuRegister(TMP), value);
6962 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
6963 }
6964}
6965
Mark Mendell9c86b482015-09-18 13:36:07 -04006966/**
6967 * Class to handle late fixup of offsets into constant area.
6968 */
6969class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
6970 public:
6971 RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
6972 : codegen_(&codegen), offset_into_constant_area_(offset) {}
6973
6974 protected:
6975 void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }
6976
6977 CodeGeneratorX86_64* codegen_;
6978
6979 private:
6980 void Process(const MemoryRegion& region, int pos) OVERRIDE {
6981 // Patch the correct offset for the instruction. We use the address of the
6982 // 'next' instruction, which is 'pos' (patch the 4 bytes before).
6983 int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
6984 int32_t relative_position = constant_offset - pos;
6985
6986 // Patch in the right value.
6987 region.StoreUnaligned<int32_t>(pos - 4, relative_position);
6988 }
6989
6990 // Location in constant area that the fixup refers to.
6991 size_t offset_into_constant_area_;
6992};
6993
6994/**
6995 t * Class to handle late fixup of offsets to a jump table that will be created in the
6996 * constant area.
6997 */
6998class JumpTableRIPFixup : public RIPFixup {
6999 public:
7000 JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
7001 : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}
7002
7003 void CreateJumpTable() {
7004 X86_64Assembler* assembler = codegen_->GetAssembler();
7005
7006 // Ensure that the reference to the jump table has the correct offset.
7007 const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
7008 SetOffset(offset_in_constant_table);
7009
7010 // Compute the offset from the start of the function to this jump table.
7011 const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;
7012
7013 // Populate the jump table with the correct values for the jump table.
7014 int32_t num_entries = switch_instr_->GetNumEntries();
7015 HBasicBlock* block = switch_instr_->GetBlock();
7016 const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
7017 // The value that we want is the target offset - the position of the table.
7018 for (int32_t i = 0; i < num_entries; i++) {
7019 HBasicBlock* b = successors[i];
7020 Label* l = codegen_->GetLabelOf(b);
7021 DCHECK(l->IsBound());
7022 int32_t offset_to_block = l->Position() - current_table_offset;
7023 assembler->AppendInt32(offset_to_block);
7024 }
7025 }
7026
7027 private:
7028 const HPackedSwitch* switch_instr_;
7029};
7030
Mark Mendellf55c3e02015-03-26 21:07:46 -04007031void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
7032 // Generate the constant area if needed.
Mark Mendell39dcf552015-04-09 20:42:42 -04007033 X86_64Assembler* assembler = GetAssembler();
Mark Mendell9c86b482015-09-18 13:36:07 -04007034 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
7035 // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8 byte values.
Mark Mendell39dcf552015-04-09 20:42:42 -04007036 assembler->Align(4, 0);
7037 constant_area_start_ = assembler->CodeSize();
Mark Mendell9c86b482015-09-18 13:36:07 -04007038
7039 // Populate any jump tables.
7040 for (auto jump_table : fixups_to_jump_tables_) {
7041 jump_table->CreateJumpTable();
7042 }
7043
7044 // And now add the constant area to the generated code.
Mark Mendell39dcf552015-04-09 20:42:42 -04007045 assembler->AddConstantArea();
Mark Mendellf55c3e02015-03-26 21:07:46 -04007046 }
7047
7048 // And finish up.
7049 CodeGenerator::Finalize(allocator);
7050}
7051
Mark Mendellf55c3e02015-03-26 21:07:46 -04007052Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
7053 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddDouble(v));
7054 return Address::RIP(fixup);
7055}
7056
7057Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
7058 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddFloat(v));
7059 return Address::RIP(fixup);
7060}
7061
7062Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
7063 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt32(v));
7064 return Address::RIP(fixup);
7065}
7066
7067Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
7068 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt64(v));
7069 return Address::RIP(fixup);
7070}
7071
Andreas Gampe85b62f22015-09-09 13:15:38 -07007072// TODO: trg as memory.
7073void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, Primitive::Type type) {
7074 if (!trg.IsValid()) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007075 DCHECK_EQ(type, Primitive::kPrimVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07007076 return;
7077 }
7078
7079 DCHECK_NE(type, Primitive::kPrimVoid);
7080
7081 Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
7082 if (trg.Equals(return_loc)) {
7083 return;
7084 }
7085
7086 // Let the parallel move resolver take care of all of this.
7087 HParallelMove parallel_move(GetGraph()->GetArena());
7088 parallel_move.AddMove(return_loc, trg, type, nullptr);
7089 GetMoveResolver()->EmitNativeCode(&parallel_move);
7090}
7091
Mark Mendell9c86b482015-09-18 13:36:07 -04007092Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
7093 // Create a fixup to be used to create and address the jump table.
7094 JumpTableRIPFixup* table_fixup =
7095 new (GetGraph()->GetArena()) JumpTableRIPFixup(*this, switch_instr);
7096
7097 // We have to populate the jump tables.
7098 fixups_to_jump_tables_.push_back(table_fixup);
7099 return Address::RIP(table_fixup);
7100}
7101
// Stores the 64-bit immediate `v` to memory, given the addresses of its low
// and high 32-bit halves. Uses a single quadword store when `v` fits in a
// sign-extended 32-bit immediate, otherwise two doubleword stores. In both
// cases the implicit null check (if any) is recorded right after the first
// store, which is the access that can fault.
void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
                                             const Address& addr_high,
                                             int64_t v,
                                             HInstruction* instruction) {
  if (IsInt<32>(v)) {
    int32_t v_32 = v;
    // movq sign-extends the 32-bit immediate to the full 64 bits.
    __ movq(addr_low, Immediate(v_32));
    MaybeRecordImplicitNullCheck(instruction);
  } else {
    // The value does not fit in a sign-extended 32-bit immediate.
    // Do it in two 32-bit pieces.
    int32_t low_v = Low32Bits(v);
    int32_t high_v = High32Bits(v);
    __ movl(addr_low, Immediate(low_v));
    MaybeRecordImplicitNullCheck(instruction);
    __ movl(addr_high, Immediate(high_v));
  }
}
7119
// Patches one use of a JIT root: writes the 32-bit address of entry
// `index_in_table` of the root table at `roots_data` into the generated code
// at the position recorded by `info.label`.
void CodeGeneratorX86_64::PatchJitRootUse(uint8_t* code,
                                          const uint8_t* roots_data,
                                          const PatchInfo<Label>& info,
                                          uint64_t index_in_table) const {
  // Adjust the bound label position back to the 4-byte literal to patch.
  uint32_t code_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
  // Address of the root's slot in the table; each slot is one GcRoot.
  uintptr_t address =
      reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
  // The patched slot inside the code buffer has no alignment guarantee.
  typedef __attribute__((__aligned__(1))) uint32_t unaligned_uint32_t;
  reinterpret_cast<unaligned_uint32_t*>(code + code_offset)[0] =
      dchecked_integral_cast<uint32_t>(address);
}
7131
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007132void CodeGeneratorX86_64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
7133 for (const PatchInfo<Label>& info : jit_string_patches_) {
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007134 const auto& it = jit_string_roots_.find(
7135 StringReference(&info.dex_file, dex::StringIndex(info.index)));
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007136 DCHECK(it != jit_string_roots_.end());
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007137 PatchJitRootUse(code, roots_data, info, it->second);
7138 }
7139
7140 for (const PatchInfo<Label>& info : jit_class_patches_) {
7141 const auto& it = jit_class_roots_.find(
7142 TypeReference(&info.dex_file, dex::TypeIndex(info.index)));
7143 DCHECK(it != jit_class_roots_.end());
7144 PatchJitRootUse(code, roots_data, info, it->second);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007145 }
7146}
7147
Roland Levillain4d027112015-07-01 15:41:14 +01007148#undef __
7149
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01007150} // namespace x86_64
7151} // namespace art