blob: 510eec4f30dd721f465bae8c6bb1c360a633113a [file] [log] [blame]
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86_64.h"
18
Mathieu Chartiere401d142015-04-22 13:56:20 -070019#include "art_method.h"
Vladimir Marko94ec2db2017-09-06 17:21:03 +010020#include "class_table.h"
Guillaume Sanchez0f88e872015-03-30 17:55:45 +010021#include "code_generator_utils.h"
Vladimir Marko58155012015-08-19 12:49:41 +000022#include "compiled_method.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010023#include "entrypoints/quick/quick_entrypoints.h"
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +010024#include "gc/accounting/card_table.h"
Andreas Gampe09659c22017-09-18 18:23:32 -070025#include "heap_poisoning.h"
Andreas Gampe71fb52f2014-12-29 17:43:08 -080026#include "intrinsics.h"
27#include "intrinsics_x86_64.h"
Vladimir Markod8dbc8d2017-09-20 13:37:47 +010028#include "linker/linker_patch.h"
Andreas Gamped4901292017-05-30 18:41:34 -070029#include "lock_word.h"
Ian Rogers7e70b002014-10-08 11:47:24 -070030#include "mirror/array-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070031#include "mirror/class-inl.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010032#include "mirror/object_reference.h"
33#include "thread.h"
34#include "utils/assembler.h"
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010035#include "utils/stack_checks.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010036#include "utils/x86_64/assembler_x86_64.h"
37#include "utils/x86_64/managed_register_x86_64.h"
38
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010039namespace art {
40
// Forward declaration only; the full definition is not needed in this file.
template<class MirrorType>
class GcRoot;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010044namespace x86_64 {
45
// Offset of the current ArtMethod* within the managed stack frame.
static constexpr int kCurrentMethodStackOffset = 0;
// Register holding the current method on entry (first integer argument
// register in the x86-64 System V calling convention).
static constexpr Register kMethodRegisterArgument = RDI;
// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
// table version generates 7 instructions and num_entries literals. Compare/jump sequence will
// generates less code/data with a small num_entries.
static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;

// Callee-saved registers per this code generator's conventions.
static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };

// Bit mask for the C2 condition flag (bit 10) — presumably of the x87 FPU
// status word, given the name; TODO(review): confirm at the use site.
static constexpr int kC2ConditionMask = 0x400;
57
// `__` is shorthand for emitting instructions through the x86-64 assembler
// owned by the (locally named) `codegen`.
// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->  // NOLINT
// Byte offset of a quick runtime entrypoint within the Thread object.
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64PointerSize, x).Int32Value()
Nicolas Geoffraye5038322014-07-04 09:41:32 +010061
// Slow path calling the kQuickThrowNullPointer runtime entrypoint. The throw
// does not return, so no exit jump is emitted (IsFatal() returns true).
class NullCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  // The runtime call throws; control never falls through this slow path.
  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
};
87
// Slow path calling the kQuickThrowDivZero runtime entrypoint. Like the null
// check slow path, the throw does not return (IsFatal() returns true).
class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    x86_64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
};
106
// Slow path for integer div/rem when the divisor is -1. The x86 idiv
// instruction cannot be used for MIN_INT(64) / -1 (it raises #DE on the
// overflowing quotient), so this path computes the result directly:
// division by -1 is negation, and remainder by -1 is zero.
class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
 public:
  DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, DataType::Type type, bool is_div)
      : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    if (type_ == DataType::Type::kInt32) {
      if (is_div_) {
        // x / -1 == -x.
        __ negl(cpu_reg_);
      } else {
        // x % -1 == 0.
        __ xorl(cpu_reg_, cpu_reg_);
      }

    } else {
      DCHECK_EQ(DataType::Type::kInt64, type_);
      if (is_div_) {
        __ negq(cpu_reg_);
      } else {
        // 32-bit xorl also zeroes the full 64-bit register: writes to a
        // 32-bit register zero-extend on x86-64, and the encoding is shorter
        // than xorq.
        __ xorl(cpu_reg_, cpu_reg_);
      }
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86_64"; }

 private:
  // Register holding both the dividend on entry and the result on exit.
  const CpuRegister cpu_reg_;
  const DataType::Type type_;
  // True for division, false for remainder.
  const bool is_div_;
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
};
140
// Slow path calling kQuickTestSuspend to let the runtime suspend this thread
// (e.g. for GC). After the runtime call it resumes execution either at the
// return label (when `successor_` is null) or at the given successor block.
class SuspendCheckSlowPathX86_64 : public SlowPathCode {
 public:
  SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCode(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);  // Only saves full width XMM for SIMD.
    x86_64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, locations);  // Only restores full width XMM for SIMD.
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(x86_64_codegen->GetLabelOf(successor_));
    }
  }

  // Label to jump back to; only valid when no explicit successor was given.
  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86_64"; }

 private:
  // Block to resume at after the suspend check, or null to use return_label_.
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
};
178
// Slow path throwing ArrayIndexOutOfBoundsException (or the string variant)
// via kQuickThrowArrayBounds / kQuickThrowStringBounds, passing the offending
// index and the array/string length as arguments. Fatal: the throw does not
// return.
class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // Are we using an array length from memory?
    HInstruction* array_length = instruction_->InputAt(1);
    Location length_loc = locations->InAt(1);
    InvokeRuntimeCallingConvention calling_convention;
    if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
      // Load the array length into our temporary.
      HArrayLength* length = array_length->AsArrayLength();
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(length);
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
      // Check for conflicts with index.
      if (length_loc.Equals(locations->InAt(0))) {
        // We know we aren't using parameter 2.
        length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
      }
      __ movl(length_loc.AsRegister<CpuRegister>(), array_len);
      if (mirror::kUseStringCompression && length->IsStringLength()) {
        // With string compression the stored count carries an extra bit
        // (presumably the compression flag in the LSB — confirm against
        // mirror::String); shift it out to get the character count.
        __ shrl(length_loc.AsRegister<CpuRegister>(), Immediate(1));
      }
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        DataType::Type::kInt32,
        length_loc,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    x86_64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};
238
// Slow path resolving a class (kQuickInitializeType) or resolving it and
// running its static initializer (kQuickInitializeStaticStorage), then moving
// the resulting class reference to the instruction's output location.
class LoadClassSlowPathX86_64 : public SlowPathCode {
 public:
  LoadClassSlowPathX86_64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : SlowPathCode(at), cls_(cls), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    // This slow path can hang off either the load itself or a clinit check.
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    // Custom calling convention: RAX serves as both input and output.
    __ movl(CpuRegister(RAX), Immediate(cls_->GetTypeIndex().index_));
    x86_64_codegen->InvokeRuntime(do_clinit_ ? kQuickInitializeStaticStorage : kQuickInitializeType,
                                  instruction_,
                                  dex_pc_,
                                  this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    Location out = locations->Out();
    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
    }

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86_64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
};
293
// Slow path resolving a String via kQuickResolveString and moving the result
// from RAX into the instruction's output location.
class LoadStringSlowPathX86_64 : public SlowPathCode {
 public:
  explicit LoadStringSlowPathX86_64(HLoadString* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
    // Custom calling convention: RAX serves as both input and output.
    __ movl(CpuRegister(RAX), Immediate(string_index.index_));
    x86_64_codegen->InvokeRuntime(kQuickResolveString,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
    RestoreLiveRegisters(codegen, locations);

    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
};
325
// Slow path for HInstanceOf and HCheckCast. Calls
// kQuickInstanceofNonTrivial (producing a boolean-like result in RAX) or
// kQuickCheckInstanceOf (which throws on failure). When `is_fatal_` is true
// (a check-cast known to fail), no registers are restored and no exit jump is
// emitted.
class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    if (kPoisonHeapReferences &&
        instruction_->IsCheckCast() &&
        instruction_->AsCheckCast()->GetTypeCheckKind() == TypeCheckKind::kInterfaceCheck) {
      // First, unpoison the `cls` reference that was poisoned for direct memory comparison.
      __ UnpoisonHeapReference(locations->InAt(1).AsRegister<CpuRegister>());
    }

    if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
      // Registers are needed afterwards (non-fatal) or by a catch handler.
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kReference,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kReference);
    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        // instanceof produces a value; move it to the output location.
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};
388
// Slow path calling kQuickDeoptimize with the deoptimization kind as
// argument; the runtime call transfers execution back to the interpreter and
// does not return here, hence no register restore or exit jump.
class DeoptimizationSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    LocationSummary* locations = instruction_->GetLocations();
    SaveLiveRegisters(codegen, locations);
    InvokeRuntimeCallingConvention calling_convention;
    // Pass the deoptimization kind in the first argument register.
    x86_64_codegen->Load32BitValue(
        CpuRegister(calling_convention.GetRegisterAt(0)),
        static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
    x86_64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
};
412
// Slow path for an object array store that needs a runtime type check:
// marshals (array, index, value) into the first three argument registers via
// a parallel move and calls kQuickAputObject.
class ArraySetSlowPathX86_64 : public SlowPathCode {
 public:
  explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    // The three inputs could overlap the argument registers, so resolve them
    // as one parallel move.
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        DataType::Type::kReference,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        DataType::Type::kReference,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
};
453
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100454// Slow path marking an object reference `ref` during a read
455// barrier. The field `obj.field` in the object `obj` holding this
456// reference does not get updated by this slow path after marking (see
457// ReadBarrierMarkAndUpdateFieldSlowPathX86_64 below for that).
458//
459// This means that after the execution of this slow path, `ref` will
460// always be up-to-date, but `obj.field` may not; i.e., after the
461// flip, `ref` will be a to-space reference, but `obj.field` will
462// probably still be a from-space reference (unless it gets updated by
463// another thread, or if another thread installed another object
464// reference (different from `ref`) in `obj.field`).
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000465class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
466 public:
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100467 ReadBarrierMarkSlowPathX86_64(HInstruction* instruction,
468 Location ref,
469 bool unpoison_ref_before_marking)
470 : SlowPathCode(instruction),
471 ref_(ref),
472 unpoison_ref_before_marking_(unpoison_ref_before_marking) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000473 DCHECK(kEmitCompilerReadBarrier);
474 }
475
476 const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86_64"; }
477
  // Emits the out-of-line marking code: optionally unpoison `ref_`, then call
  // the per-register ReadBarrierMarkRegX entrypoint, which returns the marked
  // reference in the same register, and jump back to the fast path.
  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
    Register ref_reg = ref_cpu_reg.AsRegister();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // Marking slow paths are only expected for reference loads and the
    // intrinsified invokes listed below.
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_cpu_reg);
    }
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(ref_reg, RSP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // in RDI, output in RAX):
    //
    //   RDI <- ref
    //   RAX <- ReadBarrierMark(RDI)
    //   ref <- RAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ jmp(GetExitLabel());
  }
528
529 private:
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100530 // The location (register) of the marked object reference.
531 const Location ref_;
532 // Should the reference in `ref_` be unpoisoned prior to marking it?
533 const bool unpoison_ref_before_marking_;
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000534
535 DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
536};
537
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100538// Slow path marking an object reference `ref` during a read barrier,
539// and if needed, atomically updating the field `obj.field` in the
540// object `obj` holding this reference after marking (contrary to
541// ReadBarrierMarkSlowPathX86_64 above, which never tries to update
542// `obj.field`).
543//
544// This means that after the execution of this slow path, both `ref`
545// and `obj.field` will be up-to-date; i.e., after the flip, both will
546// hold the same to-space reference (unless another thread installed
547// another object reference (different from `ref`) in `obj.field`).
548class ReadBarrierMarkAndUpdateFieldSlowPathX86_64 : public SlowPathCode {
549 public:
550 ReadBarrierMarkAndUpdateFieldSlowPathX86_64(HInstruction* instruction,
551 Location ref,
552 CpuRegister obj,
553 const Address& field_addr,
554 bool unpoison_ref_before_marking,
555 CpuRegister temp1,
556 CpuRegister temp2)
557 : SlowPathCode(instruction),
558 ref_(ref),
559 obj_(obj),
560 field_addr_(field_addr),
561 unpoison_ref_before_marking_(unpoison_ref_before_marking),
562 temp1_(temp1),
563 temp2_(temp2) {
564 DCHECK(kEmitCompilerReadBarrier);
565 }
566
567 const char* GetDescription() const OVERRIDE {
568 return "ReadBarrierMarkAndUpdateFieldSlowPathX86_64";
569 }
570
571 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
572 LocationSummary* locations = instruction_->GetLocations();
573 CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
574 Register ref_reg = ref_cpu_reg.AsRegister();
575 DCHECK(locations->CanCall());
576 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
577 // This slow path is only used by the UnsafeCASObject intrinsic.
578 DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
579 << "Unexpected instruction in read barrier marking and field updating slow path: "
580 << instruction_->DebugName();
581 DCHECK(instruction_->GetLocations()->Intrinsified());
582 DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
583
584 __ Bind(GetEntryLabel());
585 if (unpoison_ref_before_marking_) {
586 // Object* ref = ref_addr->AsMirrorPtr()
587 __ MaybeUnpoisonHeapReference(ref_cpu_reg);
588 }
589
590 // Save the old (unpoisoned) reference.
591 __ movl(temp1_, ref_cpu_reg);
592
593 // No need to save live registers; it's taken care of by the
594 // entrypoint. Also, there is no need to update the stack mask,
595 // as this runtime call will not trigger a garbage collection.
596 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
597 DCHECK_NE(ref_reg, RSP);
598 DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
599 // "Compact" slow path, saving two moves.
600 //
601 // Instead of using the standard runtime calling convention (input
602 // and output in R0):
603 //
604 // RDI <- ref
605 // RAX <- ReadBarrierMark(RDI)
606 // ref <- RAX
607 //
608 // we just use rX (the register containing `ref`) as input and output
609 // of a dedicated entrypoint:
610 //
611 // rX <- ReadBarrierMarkRegX(rX)
612 //
613 int32_t entry_point_offset =
Roland Levillain97c46462017-05-11 14:04:03 +0100614 Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100615 // This runtime call does not require a stack map.
616 x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
617
618 // If the new reference is different from the old reference,
619 // update the field in the holder (`*field_addr`).
620 //
621 // Note that this field could also hold a different object, if
622 // another thread had concurrently changed it. In that case, the
623 // LOCK CMPXCHGL instruction in the compare-and-set (CAS)
624 // operation below would abort the CAS, leaving the field as-is.
625 NearLabel done;
626 __ cmpl(temp1_, ref_cpu_reg);
627 __ j(kEqual, &done);
628
629 // Update the the holder's field atomically. This may fail if
630 // mutator updates before us, but it's OK. This is achived
631 // using a strong compare-and-set (CAS) operation with relaxed
632 // memory synchronization ordering, where the expected value is
633 // the old reference and the desired value is the new reference.
634 // This operation is implemented with a 32-bit LOCK CMPXLCHG
635 // instruction, which requires the expected value (the old
636 // reference) to be in EAX. Save RAX beforehand, and move the
637 // expected value (stored in `temp1_`) into EAX.
638 __ movq(temp2_, CpuRegister(RAX));
639 __ movl(CpuRegister(RAX), temp1_);
640
641 // Convenience aliases.
642 CpuRegister base = obj_;
643 CpuRegister expected = CpuRegister(RAX);
644 CpuRegister value = ref_cpu_reg;
645
646 bool base_equals_value = (base.AsRegister() == value.AsRegister());
647 Register value_reg = ref_reg;
648 if (kPoisonHeapReferences) {
649 if (base_equals_value) {
650 // If `base` and `value` are the same register location, move
651 // `value_reg` to a temporary register. This way, poisoning
652 // `value_reg` won't invalidate `base`.
653 value_reg = temp1_.AsRegister();
654 __ movl(CpuRegister(value_reg), base);
655 }
656
657 // Check that the register allocator did not assign the location
658 // of `expected` (RAX) to `value` nor to `base`, so that heap
659 // poisoning (when enabled) works as intended below.
660 // - If `value` were equal to `expected`, both references would
661 // be poisoned twice, meaning they would not be poisoned at
662 // all, as heap poisoning uses address negation.
663 // - If `base` were equal to `expected`, poisoning `expected`
664 // would invalidate `base`.
665 DCHECK_NE(value_reg, expected.AsRegister());
666 DCHECK_NE(base.AsRegister(), expected.AsRegister());
667
668 __ PoisonHeapReference(expected);
669 __ PoisonHeapReference(CpuRegister(value_reg));
670 }
671
672 __ LockCmpxchgl(field_addr_, CpuRegister(value_reg));
673
674 // If heap poisoning is enabled, we need to unpoison the values
675 // that were poisoned earlier.
676 if (kPoisonHeapReferences) {
677 if (base_equals_value) {
678 // `value_reg` has been moved to a temporary register, no need
679 // to unpoison it.
680 } else {
681 __ UnpoisonHeapReference(CpuRegister(value_reg));
682 }
683 // No need to unpoison `expected` (RAX), as it is be overwritten below.
684 }
685
686 // Restore RAX.
687 __ movq(CpuRegister(RAX), temp2_);
688
689 __ Bind(&done);
690 __ jmp(GetExitLabel());
691 }
692
693 private:
694 // The location (register) of the marked object reference.
695 const Location ref_;
696 // The register containing the object holding the marked object reference field.
697 const CpuRegister obj_;
698 // The address of the marked reference field. The base of this address must be `obj_`.
699 const Address field_addr_;
700
701 // Should the reference in `ref_` be unpoisoned prior to marking it?
702 const bool unpoison_ref_before_marking_;
703
704 const CpuRegister temp1_;
705 const CpuRegister temp2_;
706
707 DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathX86_64);
708};
709
Roland Levillain0d5a2812015-11-13 10:07:31 +0000710// Slow path generating a read barrier for a heap reference.
711class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
712 public:
713 ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
714 Location out,
715 Location ref,
716 Location obj,
717 uint32_t offset,
718 Location index)
David Srbecky9cd6d372016-02-09 15:24:47 +0000719 : SlowPathCode(instruction),
Roland Levillain0d5a2812015-11-13 10:07:31 +0000720 out_(out),
721 ref_(ref),
722 obj_(obj),
723 offset_(offset),
724 index_(index) {
725 DCHECK(kEmitCompilerReadBarrier);
726 // If `obj` is equal to `out` or `ref`, it means the initial
727 // object has been overwritten by (or after) the heap object
728 // reference load to be instrumented, e.g.:
729 //
730 // __ movl(out, Address(out, offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000731 // codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +0000732 //
733 // In that case, we have lost the information about the original
734 // object, and the emitted read barrier cannot work properly.
735 DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
736 DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
737}
738
739 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
740 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
741 LocationSummary* locations = instruction_->GetLocations();
742 CpuRegister reg_out = out_.AsRegister<CpuRegister>();
743 DCHECK(locations->CanCall());
744 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
Roland Levillain3d312422016-06-23 13:53:42 +0100745 DCHECK(instruction_->IsInstanceFieldGet() ||
746 instruction_->IsStaticFieldGet() ||
747 instruction_->IsArrayGet() ||
748 instruction_->IsInstanceOf() ||
749 instruction_->IsCheckCast() ||
Andreas Gamped9911ee2017-03-27 13:27:24 -0700750 (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000751 << "Unexpected instruction in read barrier for heap reference slow path: "
752 << instruction_->DebugName();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000753
754 __ Bind(GetEntryLabel());
755 SaveLiveRegisters(codegen, locations);
756
757 // We may have to change the index's value, but as `index_` is a
758 // constant member (like other "inputs" of this slow path),
759 // introduce a copy of it, `index`.
760 Location index = index_;
761 if (index_.IsValid()) {
Roland Levillain3d312422016-06-23 13:53:42 +0100762 // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
Roland Levillain0d5a2812015-11-13 10:07:31 +0000763 if (instruction_->IsArrayGet()) {
764 // Compute real offset and store it in index_.
765 Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
766 DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
767 if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
768 // We are about to change the value of `index_reg` (see the
769 // calls to art::x86_64::X86_64Assembler::shll and
770 // art::x86_64::X86_64Assembler::AddImmediate below), but it
771 // has not been saved by the previous call to
772 // art::SlowPathCode::SaveLiveRegisters, as it is a
773 // callee-save register --
774 // art::SlowPathCode::SaveLiveRegisters does not consider
775 // callee-save registers, as it has been designed with the
776 // assumption that callee-save registers are supposed to be
777 // handled by the called function. So, as a callee-save
778 // register, `index_reg` _would_ eventually be saved onto
779 // the stack, but it would be too late: we would have
780 // changed its value earlier. Therefore, we manually save
781 // it here into another freely available register,
782 // `free_reg`, chosen of course among the caller-save
783 // registers (as a callee-save `free_reg` register would
784 // exhibit the same problem).
785 //
786 // Note we could have requested a temporary register from
787 // the register allocator instead; but we prefer not to, as
788 // this is a slow path, and we know we can find a
789 // caller-save register that is available.
790 Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
791 __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
792 index_reg = free_reg;
793 index = Location::RegisterLocation(index_reg);
794 } else {
795 // The initial register stored in `index_` has already been
796 // saved in the call to art::SlowPathCode::SaveLiveRegisters
797 // (as it is not a callee-save register), so we can freely
798 // use it.
799 }
800 // Shifting the index value contained in `index_reg` by the
801 // scale factor (2) cannot overflow in practice, as the
802 // runtime is unable to allocate object arrays with a size
803 // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
804 __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
805 static_assert(
806 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
807 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
808 __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
809 } else {
Roland Levillain3d312422016-06-23 13:53:42 +0100810 // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
811 // intrinsics, `index_` is not shifted by a scale factor of 2
812 // (as in the case of ArrayGet), as it is actually an offset
813 // to an object field within an object.
814 DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000815 DCHECK(instruction_->GetLocations()->Intrinsified());
816 DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
817 (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
818 << instruction_->AsInvoke()->GetIntrinsic();
819 DCHECK_EQ(offset_, 0U);
820 DCHECK(index_.IsRegister());
821 }
822 }
823
824 // We're moving two or three locations to locations that could
825 // overlap, so we need a parallel move resolver.
826 InvokeRuntimeCallingConvention calling_convention;
Vladimir Markoca6fff82017-10-03 14:49:14 +0100827 HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
Roland Levillain0d5a2812015-11-13 10:07:31 +0000828 parallel_move.AddMove(ref_,
829 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100830 DataType::Type::kReference,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000831 nullptr);
832 parallel_move.AddMove(obj_,
833 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100834 DataType::Type::kReference,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000835 nullptr);
836 if (index.IsValid()) {
837 parallel_move.AddMove(index,
838 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100839 DataType::Type::kInt32,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000840 nullptr);
841 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
842 } else {
843 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
844 __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
845 }
Serban Constantinescuba45db02016-07-12 22:53:02 +0100846 x86_64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000847 instruction_,
848 instruction_->GetDexPc(),
849 this);
850 CheckEntrypointTypes<
851 kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
852 x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));
853
854 RestoreLiveRegisters(codegen, locations);
855 __ jmp(GetExitLabel());
856 }
857
858 const char* GetDescription() const OVERRIDE {
859 return "ReadBarrierForHeapReferenceSlowPathX86_64";
860 }
861
862 private:
863 CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
864 size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
865 size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
866 for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
867 if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
868 return static_cast<CpuRegister>(i);
869 }
870 }
871 // We shall never fail to find a free caller-save register, as
872 // there are more than two core caller-save registers on x86-64
873 // (meaning it is possible to find one which is different from
874 // `ref` and `obj`).
875 DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
876 LOG(FATAL) << "Could not find a free caller-save register";
877 UNREACHABLE();
878 }
879
Roland Levillain0d5a2812015-11-13 10:07:31 +0000880 const Location out_;
881 const Location ref_;
882 const Location obj_;
883 const uint32_t offset_;
884 // An additional location containing an index to an array.
885 // Only used for HArrayGet and the UnsafeGetObject &
886 // UnsafeGetObjectVolatile intrinsics.
887 const Location index_;
888
889 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
890};
891
// Slow path generating a read barrier for a GC root: passes `root` to the
// kQuickReadBarrierForRootSlow entrypoint and stores its result into `out`.
class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    // This slow path is only ever constructed when read barriers are enabled.
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    // GC roots only come from HLoadClass and HLoadString here.
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Move the root into the first argument register, call the entrypoint,
    // and move the result (returned in RAX) into `out_`.
    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86_64"; }

 private:
  // The location where the result of the read barrier is stored.
  const Location out_;
  // The location of the GC root passed to the runtime.
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
};
933
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100934#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100935// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
936#define __ down_cast<X86_64Assembler*>(GetAssembler())-> // NOLINT
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100937
Roland Levillain4fa13f62015-07-06 18:11:54 +0100938inline Condition X86_64IntegerCondition(IfCondition cond) {
Dave Allison20dfc792014-06-16 20:44:29 -0700939 switch (cond) {
940 case kCondEQ: return kEqual;
941 case kCondNE: return kNotEqual;
942 case kCondLT: return kLess;
943 case kCondLE: return kLessEqual;
944 case kCondGT: return kGreater;
945 case kCondGE: return kGreaterEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700946 case kCondB: return kBelow;
947 case kCondBE: return kBelowEqual;
948 case kCondA: return kAbove;
949 case kCondAE: return kAboveEqual;
Dave Allison20dfc792014-06-16 20:44:29 -0700950 }
Roland Levillain4fa13f62015-07-06 18:11:54 +0100951 LOG(FATAL) << "Unreachable";
952 UNREACHABLE();
953}
954
Aart Bike9f37602015-10-09 11:15:55 -0700955// Maps FP condition to x86_64 name.
Roland Levillain4fa13f62015-07-06 18:11:54 +0100956inline Condition X86_64FPCondition(IfCondition cond) {
957 switch (cond) {
958 case kCondEQ: return kEqual;
959 case kCondNE: return kNotEqual;
960 case kCondLT: return kBelow;
961 case kCondLE: return kBelowEqual;
962 case kCondGT: return kAbove;
963 case kCondGE: return kAboveEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700964 default: break; // should not happen
Igor Murashkin2ffb7032017-11-08 13:35:21 -0800965 }
Roland Levillain4fa13f62015-07-06 18:11:54 +0100966 LOG(FATAL) << "Unreachable";
967 UNREACHABLE();
Dave Allison20dfc792014-06-16 20:44:29 -0700968}
969
Vladimir Markodc151b22015-10-15 18:02:30 +0100970HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
971 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +0100972 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Nicolas Geoffrayc1a42cf2016-12-18 15:52:36 +0000973 return desired_dispatch_info;
Vladimir Markodc151b22015-10-15 18:02:30 +0100974}
975
Vladimir Markoe7197bf2017-06-02 17:00:23 +0100976void CodeGeneratorX86_64::GenerateStaticOrDirectCall(
977 HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -0800978 // All registers are assumed to be correctly set up.
Vladimir Marko4ee8e292017-06-02 15:39:30 +0000979
Vladimir Marko58155012015-08-19 12:49:41 +0000980 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
981 switch (invoke->GetMethodLoadKind()) {
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +0100982 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
Vladimir Marko58155012015-08-19 12:49:41 +0000983 // temp = thread->string_init_entrypoint
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +0100984 uint32_t offset =
985 GetThreadOffset<kX86_64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
986 __ gs()->movq(temp.AsRegister<CpuRegister>(), Address::Absolute(offset, /* no_rip */ true));
Vladimir Marko58155012015-08-19 12:49:41 +0000987 break;
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +0100988 }
Vladimir Marko58155012015-08-19 12:49:41 +0000989 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +0000990 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +0000991 break;
Vladimir Marko65979462017-05-19 17:25:12 +0100992 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative:
993 DCHECK(GetCompilerOptions().IsBootImage());
994 __ leal(temp.AsRegister<CpuRegister>(),
995 Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
996 RecordBootMethodPatch(invoke);
997 break;
Vladimir Marko58155012015-08-19 12:49:41 +0000998 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
Vladimir Marko2d73f332017-03-16 15:55:49 +0000999 Load64BitValue(temp.AsRegister<CpuRegister>(), invoke->GetMethodAddress());
Vladimir Marko58155012015-08-19 12:49:41 +00001000 break;
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001001 case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
Vladimir Marko58155012015-08-19 12:49:41 +00001002 __ movq(temp.AsRegister<CpuRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00001003 Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001004 // Bind a new fixup label at the end of the "movl" insn.
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001005 __ Bind(NewMethodBssEntryPatch(
1006 MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex())));
Vladimir Marko58155012015-08-19 12:49:41 +00001007 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001008 }
Vladimir Markoe7197bf2017-06-02 17:00:23 +01001009 case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
1010 GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
1011 return; // No code pointer retrieval; the runtime performs the call directly.
Vladimir Marko9b688a02015-05-06 14:12:42 +01001012 }
Vladimir Marko58155012015-08-19 12:49:41 +00001013 }
1014
1015 switch (invoke->GetCodePtrLocation()) {
1016 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
1017 __ call(&frame_entry_label_);
1018 break;
Vladimir Marko58155012015-08-19 12:49:41 +00001019 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
1020 // (callee_method + offset_of_quick_compiled_code)()
1021 __ call(Address(callee_method.AsRegister<CpuRegister>(),
1022 ArtMethod::EntryPointFromQuickCompiledCodeOffset(
Andreas Gampe542451c2016-07-26 09:02:02 -07001023 kX86_64PointerSize).SizeValue()));
Vladimir Marko58155012015-08-19 12:49:41 +00001024 break;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001025 }
Vladimir Markoe7197bf2017-06-02 17:00:23 +01001026 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08001027
1028 DCHECK(!IsLeafMethod());
Andreas Gampe71fb52f2014-12-29 17:43:08 -08001029}
1030
Vladimir Markoe7197bf2017-06-02 17:00:23 +01001031void CodeGeneratorX86_64::GenerateVirtualCall(
1032 HInvokeVirtual* invoke, Location temp_in, SlowPathCode* slow_path) {
Andreas Gampebfb5ba92015-09-01 15:45:02 +00001033 CpuRegister temp = temp_in.AsRegister<CpuRegister>();
1034 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
1035 invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();
Nicolas Geoffraye5234232015-12-02 09:06:11 +00001036
1037 // Use the calling convention instead of the location of the receiver, as
1038 // intrinsics may have put the receiver in a different register. In the intrinsics
1039 // slow path, the arguments have been moved to the right place, so here we are
1040 // guaranteed that the receiver is the first register of the calling convention.
1041 InvokeDexCallingConvention calling_convention;
1042 Register receiver = calling_convention.GetRegisterAt(0);
1043
Andreas Gampebfb5ba92015-09-01 15:45:02 +00001044 size_t class_offset = mirror::Object::ClassOffset().SizeValue();
Roland Levillain0d5a2812015-11-13 10:07:31 +00001045 // /* HeapReference<Class> */ temp = receiver->klass_
Nicolas Geoffraye5234232015-12-02 09:06:11 +00001046 __ movl(temp, Address(CpuRegister(receiver), class_offset));
Andreas Gampebfb5ba92015-09-01 15:45:02 +00001047 MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +00001048 // Instead of simply (possibly) unpoisoning `temp` here, we should
1049 // emit a read barrier for the previous class reference load.
1050 // However this is not required in practice, as this is an
1051 // intermediate/temporary reference and because the current
1052 // concurrent copying collector keeps the from-space memory
1053 // intact/accessible until the end of the marking phase (the
1054 // concurrent copying collector may not in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00001055 __ MaybeUnpoisonHeapReference(temp);
1056 // temp = temp->GetMethodAt(method_offset);
1057 __ movq(temp, Address(temp, method_offset));
1058 // call temp->GetEntryPoint();
1059 __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
Andreas Gampe542451c2016-07-26 09:02:02 -07001060 kX86_64PointerSize).SizeValue()));
Vladimir Markoe7197bf2017-06-02 17:00:23 +01001061 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00001062}
1063
Vladimir Marko65979462017-05-19 17:25:12 +01001064void CodeGeneratorX86_64::RecordBootMethodPatch(HInvokeStaticOrDirect* invoke) {
1065 boot_image_method_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
Mathieu Chartierfc8b4222017-09-17 13:44:24 -07001066 invoke->GetTargetMethod().index);
Vladimir Marko65979462017-05-19 17:25:12 +01001067 __ Bind(&boot_image_method_patches_.back().label);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001068}
1069
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001070Label* CodeGeneratorX86_64::NewMethodBssEntryPatch(MethodReference target_method) {
1071 // Add a patch entry and return the label.
Mathieu Chartierfc8b4222017-09-17 13:44:24 -07001072 method_bss_entry_patches_.emplace_back(*target_method.dex_file, target_method.index);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001073 return &method_bss_entry_patches_.back().label;
1074}
1075
Vladimir Marko1998cd02017-01-13 13:02:58 +00001076void CodeGeneratorX86_64::RecordBootTypePatch(HLoadClass* load_class) {
1077 boot_image_type_patches_.emplace_back(load_class->GetDexFile(),
1078 load_class->GetTypeIndex().index_);
1079 __ Bind(&boot_image_type_patches_.back().label);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001080}
1081
Vladimir Marko6bec91c2017-01-09 15:03:12 +00001082Label* CodeGeneratorX86_64::NewTypeBssEntryPatch(HLoadClass* load_class) {
Vladimir Marko1998cd02017-01-13 13:02:58 +00001083 type_bss_entry_patches_.emplace_back(load_class->GetDexFile(), load_class->GetTypeIndex().index_);
1084 return &type_bss_entry_patches_.back().label;
Vladimir Marko6bec91c2017-01-09 15:03:12 +00001085}
1086
Vladimir Marko65979462017-05-19 17:25:12 +01001087void CodeGeneratorX86_64::RecordBootStringPatch(HLoadString* load_string) {
Vladimir Marko65979462017-05-19 17:25:12 +01001088 string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex().index_);
1089 __ Bind(&string_patches_.back().label);
1090}
1091
Vladimir Markoaad75c62016-10-03 08:46:48 +00001092Label* CodeGeneratorX86_64::NewStringBssEntryPatch(HLoadString* load_string) {
1093 DCHECK(!GetCompilerOptions().IsBootImage());
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001094 string_bss_entry_patches_.emplace_back(
1095 load_string->GetDexFile(), load_string->GetStringIndex().index_);
1096 return &string_bss_entry_patches_.back().label;
Vladimir Markoaad75c62016-10-03 08:46:48 +00001097}
1098
Vladimir Markoaad75c62016-10-03 08:46:48 +00001099// The label points to the end of the "movl" or another instruction but the literal offset
1100// for method patch needs to point to the embedded constant which occupies the last 4 bytes.
1101constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
1102
// Converts a collection of recorded PatchInfo entries into linker patches
// using the supplied LinkerPatch factory function. The literal offset is the
// label position minus 4 (the patched constant is the last 4 bytes of the
// instruction); the label position itself serves as the PC-relative anchor.
template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorX86_64::EmitPcRelativeLinkerPatches(
    const ArenaDeque<PatchInfo<Label>>& infos,
    ArenaVector<linker::LinkerPatch>* linker_patches) {
  for (const PatchInfo<Label>& info : infos) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(
        Factory(literal_offset, &info.dex_file, info.label.Position(), info.index));
  }
}
1113
// Flushes every recorded patch collection into `linker_patches`. The patch
// kind chosen for method/type/string patches depends on whether we are
// compiling the boot image; the .bss-entry patches are emitted either way.
void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  // Reserve the exact total up front to avoid reallocation while appending.
  size_t size =
      boot_image_method_patches_.size() +
      method_bss_entry_patches_.size() +
      boot_image_type_patches_.size() +
      type_bss_entry_patches_.size() +
      string_patches_.size() +
      string_bss_entry_patches_.size();
  linker_patches->reserve(size);
  if (GetCompilerOptions().IsBootImage()) {
    // Boot image: references resolve to PC-relative boot-image offsets.
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
        boot_image_method_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
        boot_image_type_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
        string_patches_, linker_patches);
  } else {
    // App image: no boot-image method patches are expected; types and strings
    // go through the class table / intern table instead.
    DCHECK(boot_image_method_patches_.empty());
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeClassTablePatch>(
        boot_image_type_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringInternTablePatch>(
        string_patches_, linker_patches);
  }
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
      method_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
      type_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
      string_bss_entry_patches_, linker_patches);
  DCHECK_EQ(size, linker_patches->size());
}
1146
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001147void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001148 stream << Register(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001149}
1150
1151void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001152 stream << FloatRegister(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001153}
1154
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001155size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1156 __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
1157 return kX86_64WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001158}
1159
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001160size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1161 __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
1162 return kX86_64WordSize;
1163}
1164
// Spills an XMM register to the stack. With SIMD in the graph the full
// 128 bits are saved (movups); otherwise only the low 64 bits (movsd),
// since at most a double is live in the register.
size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  if (GetGraph()->HasSIMD()) {
    __ movups(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
  } else {
    __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
  }
  // Slot size likewise depends on whether SIMD is in use.
  return GetFloatingPointSpillSlotSize();
}
1173
// Reloads an XMM register from the stack; mirrors SaveFloatingPointRegister
// (128-bit movups with SIMD, 64-bit movsd otherwise).
size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  if (GetGraph()->HasSIMD()) {
    __ movups(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  } else {
    __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  }
  return GetFloatingPointSpillSlotSize();
}
1182
// Emits a call to the quick runtime entrypoint `entrypoint`. Validates the
// call against `instruction`/`slow_path` (debug-only checks), performs the
// thread-local call, and records a stack map when the entrypoint needs one.
void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  GenerateInvokeRuntime(GetThreadOffset<kX86_64PointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
1193
// Emits a runtime call identified by a raw thread-offset, deliberately
// without recording PC info (callers must be on paths where no stack map
// is required; validated in debug builds).
void CodeGeneratorX86_64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                              HInstruction* instruction,
                                                              SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}
1200
// Emits the actual runtime call: an absolute (non-RIP-relative) call through
// the GS segment, which on x86-64 holds the current Thread's entrypoint table.
void CodeGeneratorX86_64::GenerateInvokeRuntime(int32_t entry_point_offset) {
  __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip */ true));
}
1204
// x86-64 registers are 64-bit wide, so no register pairs are needed.
static constexpr int kNumberOfCpuRegisterPairs = 0;
// Use a fake return address register to mimic Quick.
static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
// Constructs the x86-64 code generator. The base-class blocked-register masks
// include all callee-saved core/FPU registers plus the fake return register;
// all patch/fixup containers are arena-allocated from the graph's allocator.
CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
                                         const X86_64InstructionSetFeatures& isa_features,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCpuRegisters,
                    kNumberOfFloatRegisters,
                    kNumberOfCpuRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves))
                        | (1 << kFakeReturnRegister),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetAllocator(), this),
      assembler_(graph->GetAllocator()),
      isa_features_(isa_features),
      constant_area_start_(0),
      boot_image_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      fixups_to_jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
  // The fake return register must be known to the allocator bookkeeping.
  AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001241
// Constructs the per-instruction visitor, caching the shared assembler and a
// back-pointer to the owning code generator.
InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
                                                               CodeGeneratorX86_64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}
1247
David Brazdil58282f42016-01-14 12:45:10 +00001248void CodeGeneratorX86_64::SetupBlockedRegisters() const {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001249 // Stack register is always reserved.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001250 blocked_core_registers_[RSP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001251
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001252 // Block the register used as TMP.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001253 blocked_core_registers_[TMP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001254}
1255
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001256static dwarf::Reg DWARFReg(Register reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001257 return dwarf::Reg::X86_64Core(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001258}
David Srbecky9d8606d2015-04-12 09:35:32 +01001259
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001260static dwarf::Reg DWARFReg(FloatRegister reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001261 return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001262}
1263
// Emits the method prologue: optional stack-overflow probe, callee-save core
// register pushes, frame allocation, callee-save XMM spills, the current
// ArtMethod* store, and the should_deoptimize flag. CFI directives are
// emitted alongside every stack-pointer-changing instruction.
void CodeGeneratorX86_64::GenerateFrameEntry() {
  __ cfi().SetCurrentCFAOffset(kX86_64WordSize);  // return address
  __ Bind(&frame_entry_label_);
  // Leaf methods with small frames can rely on the implicit (fault-based)
  // overflow check alone and skip the explicit probe.
  bool skip_overflow_check = IsLeafMethod()
      && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());

  if (!skip_overflow_check) {
    // Probe below the stack pointer; a fault here is turned into
    // StackOverflowError, hence the PC info recorded right after.
    size_t reserved_bytes = GetStackOverflowReservedBytes(InstructionSet::kX86_64);
    __ testq(CpuRegister(RAX), Address(CpuRegister(RSP), -static_cast<int32_t>(reserved_bytes)));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    return;
  }

  // Push allocated callee-save core registers, highest index first.
  for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
    Register reg = kCoreCalleeSaves[i];
    if (allocated_registers_.ContainsCoreRegister(reg)) {
      __ pushq(CpuRegister(reg));
      __ cfi().AdjustCFAOffset(kX86_64WordSize);
      __ cfi().RelOffset(DWARFReg(reg), 0);
    }
  }

  // Allocate the remainder of the frame (core spills were made by pushq).
  int adjust = GetFrameSize() - GetCoreSpillSize();
  __ subq(CpuRegister(RSP), Immediate(adjust));
  __ cfi().AdjustCFAOffset(adjust);
  uint32_t xmm_spill_location = GetFpuSpillStart();
  size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();

  // Spill allocated callee-save XMM registers into their frame slots.
  for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
    if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
      int offset = xmm_spill_location + (xmm_spill_slot_size * i);
      __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
      __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
    }
  }

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
            CpuRegister(kMethodRegisterArgument));
  }

  if (GetGraph()->HasShouldDeoptimizeFlag()) {
    // Initialize should_deoptimize flag to 0.
    __ movl(Address(CpuRegister(RSP), GetStackOffsetOfShouldDeoptimizeFlag()), Immediate(0));
  }
}
1317
// Emits the method epilogue, mirroring GenerateFrameEntry in reverse:
// restore XMM callee-saves, deallocate the frame, pop core callee-saves,
// then ret. CFI state is saved/restored around the epilogue so unwinding
// information after the ret still describes the full frame.
void CodeGeneratorX86_64::GenerateFrameExit() {
  __ cfi().RememberState();
  if (!HasEmptyFrame()) {
    // Reload callee-save XMM registers from their spill slots.
    uint32_t xmm_spill_location = GetFpuSpillStart();
    size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
        int offset = xmm_spill_location + (xmm_spill_slot_size * i);
        __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
        __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
      }
    }

    // Deallocate everything except the pushed core registers.
    int adjust = GetFrameSize() - GetCoreSpillSize();
    __ addq(CpuRegister(RSP), Immediate(adjust));
    __ cfi().AdjustCFAOffset(-adjust);

    // Pop core callee-saves in the reverse order of the prologue's pushes.
    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
      Register reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        __ popq(CpuRegister(reg));
        __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
        __ cfi().Restore(DWARFReg(reg));
      }
    }
  }
  __ ret();
  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1348
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001349void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
1350 __ Bind(GetLabelOf(block));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001351}
1352
// Moves a value between two arbitrary locations (register, FPU register,
// 32-bit stack slot, 64-bit stack slot, or constant source), selecting the
// instruction and operand width from the location kinds. Stack-to-stack
// moves go through the TMP scratch register.
void CodeGeneratorX86_64::Move(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    CpuRegister dest = destination.AsRegister<CpuRegister>();
    if (source.IsRegister()) {
      __ movq(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      // Cross-file move: transfer the XMM bits into the core register.
      __ movd(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsStackSlot()) {
      // 32-bit slot: movl also zero-extends into the full 64-bit register.
      __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      if (constant->IsLongConstant()) {
        Load64BitValue(dest, constant->AsLongConstant()->GetValue());
      } else {
        Load32BitValue(dest, GetInt32ValueOf(constant));
      }
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsFpuRegister()) {
    XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
    if (source.IsRegister()) {
      __ movd(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movaps(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      // Load the constant's raw bit pattern; 32 bits for float, 64 otherwise.
      HConstant* constant = source.GetConstant();
      int64_t value = CodeGenerator::GetInt64ValueOf(constant);
      if (constant->IsFloatConstant()) {
        Load32BitValue(dest, static_cast<int32_t>(value));
      } else {
        Load64BitValue(dest, value);
      }
    } else if (source.IsStackSlot()) {
      __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsStackSlot()) {
    // 32-bit stack destination.
    if (source.IsRegister()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      // A 32-bit immediate can be stored directly to memory.
      HConstant* constant = source.GetConstant();
      int32_t value = GetInt32ValueOf(constant);
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
    } else {
      DCHECK(source.IsStackSlot()) << source;
      // Stack-to-stack via the scratch register.
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else {
    // 64-bit stack destination.
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegister()) {
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      // 64-bit immediates cannot be stored directly; use the helper.
      HConstant* constant = source.GetConstant();
      DCHECK(constant->IsLongConstant() || constant->IsDoubleConstant());
      int64_t value = GetInt64ValueOf(constant);
      Store64BitValueToStack(destination, value);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  }
}
1432
Calin Juravle175dc732015-08-25 15:42:32 +01001433void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
1434 DCHECK(location.IsRegister());
1435 Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
1436}
1437
// The destination type is unused on x86-64: Move() derives the operand
// width from the location kinds themselves.
void CodeGeneratorX86_64::MoveLocation(
    Location dst, Location src, DataType::Type dst_type ATTRIBUTE_UNUSED) {
  Move(dst, src);
}
1442
1443void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1444 if (location.IsRegister()) {
1445 locations->AddTemp(location);
1446 } else {
1447 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1448 }
1449}
1450
// Shared lowering for HGoto/HTryBoundary edges: emits suspend checks where
// required and a jmp only when the successor is not the fallthrough block.
void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  if (successor->IsExitBlock()) {
    // Only an always-throwing predecessor can jump straight to the exit.
    DCHECK(got->GetPrevious()->AlwaysThrows());
    return;  // no code needed
  }

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    // Back edges run the loop's suspend check, which also handles the jump.
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  // The method-entry suspend check sits in the entry block just before the goto.
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ jmp(codegen_->GetLabelOf(successor));
  }
}
1473
// HGoto needs no operands and therefore no location summary.
void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
1477
// Lowers HGoto via the shared HandleGoto helper.
void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}
1481
// HTryBoundary needs no operands and therefore no location summary.
void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}
1485
// Lowers HTryBoundary like a goto along its normal-flow edge; a jump to the
// exit block would be dead code and is skipped.
void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
  HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
  if (!successor->IsExitBlock()) {
    HandleGoto(try_boundary, successor);
  }
}
1492
// HExit needs no operands and therefore no location summary.
void LocationsBuilderX86_64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
1496
// HExit generates no code; control never falls through the exit block.
void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
1499
// Emits the conditional jumps following a floating-point compare (which was
// emitted earlier, e.g. by GenerateCompareTest). A NaN operand leaves the
// "unordered" condition set, so route that case explicitly to whichever
// target the condition considers true (or false) for NaN.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
                                                     LabelType* true_label,
                                                     LabelType* false_label) {
  if (cond->IsFPConditionTrueIfNaN()) {
    __ j(kUnordered, true_label);
  } else if (cond->IsFPConditionFalseIfNaN()) {
    __ j(kUnordered, false_label);
  }
  __ j(X86_64FPCondition(cond->GetCondition()), true_label);
}
1511
// Emits the compare that sets the condition codes for `condition`, choosing
// the instruction by operand type: integer cmp/test for integral and
// reference types, ucomiss/ucomisd for float/double. The right-hand operand
// may be a register, a constant (floats via the constant area), or a stack
// slot.
void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
  LocationSummary* locations = condition->GetLocations();

  Location left = locations->InAt(0);
  Location right = locations->InAt(1);
  DataType::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kReference: {
      // All sub-word types compare as 32-bit values.
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case DataType::Type::kInt64: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case DataType::Type::kFloat32: {
      if (right.IsFpuRegister()) {
        __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        // Float constants are materialized in the constant area.
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralFloatAddress(
                       right.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(right.IsStackSlot());
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    case DataType::Type::kFloat64: {
      if (right.IsFpuRegister()) {
        __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        // Double constants are materialized in the constant area.
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralDoubleAddress(
                       right.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(right.IsDoubleStackSlot());
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected condition type " << type;
  }
}
1565
1566template<class LabelType>
1567void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
1568 LabelType* true_target_in,
1569 LabelType* false_target_in) {
1570 // Generated branching requires both targets to be explicit. If either of the
1571 // targets is nullptr (fallthrough) use and bind `fallthrough_target` instead.
1572 LabelType fallthrough_target;
1573 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1574 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
1575
1576 // Generate the comparison to set the CC.
1577 GenerateCompareTest(condition);
1578
1579 // Now generate the correct jump(s).
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001580 DataType::Type type = condition->InputAt(0)->GetType();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001581 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001582 case DataType::Type::kInt64: {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001583 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
1584 break;
1585 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001586 case DataType::Type::kFloat32: {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001587 GenerateFPJumps(condition, true_target, false_target);
1588 break;
1589 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001590 case DataType::Type::kFloat64: {
Mark Mendellc4701932015-04-10 13:18:51 -04001591 GenerateFPJumps(condition, true_target, false_target);
1592 break;
1593 }
1594 default:
1595 LOG(FATAL) << "Unexpected condition type " << type;
1596 }
1597
David Brazdil0debae72015-11-12 18:37:00 +00001598 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001599 __ jmp(false_target);
1600 }
David Brazdil0debae72015-11-12 18:37:00 +00001601
1602 if (fallthrough_target.IsLinked()) {
1603 __ Bind(&fallthrough_target);
1604 }
Mark Mendellc4701932015-04-10 13:18:51 -04001605}
1606
David Brazdil0debae72015-11-12 18:37:00 +00001607static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1608 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
1609 // are set only strictly before `branch`. We can't use the eflags on long
1610 // conditions if they are materialized due to the complex branching.
1611 return cond->IsCondition() &&
1612 cond->GetNext() == branch &&
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001613 !DataType::IsFloatingPointType(cond->InputAt(0)->GetType());
David Brazdil0debae72015-11-12 18:37:00 +00001614}
1615
// Emits a conditional branch for `instruction`, whose condition is the input at
// `condition_input_index`. Either label may be null, which means the
// corresponding successor is the fall-through block and needs no jump.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
                                                           size_t condition_input_index,
                                                           LabelType* true_target,
                                                           LabelType* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ jmp(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ jmp(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //   (1) true_target == nullptr && false_target != nullptr
  //         - opposite condition true => branch to false_target
  //   (2) true_target != nullptr && false_target == nullptr
  //         - condition true => branch to true_target
  //   (3) true_target != nullptr && false_target != nullptr
  //         - condition true => branch to true_target
  //         - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    if (AreEflagsSetFrom(cond, instruction)) {
      // EFLAGS were set by the instruction immediately before this branch;
      // reuse them instead of re-testing the materialized value.
      if (true_target == nullptr) {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
      } else {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
      }
    } else {
      // Materialized condition, compare against 0.
      Location lhs = instruction->GetLocations()->InAt(condition_input_index);
      if (lhs.IsRegister()) {
        __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
      } else {
        __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
      }
      if (true_target == nullptr) {
        __ j(kEqual, false_target);
      } else {
        __ j(kNotEqual, true_target);
      }
    }
  } else {
    // Condition has not been materialized, use its inputs as the
    // comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    // If this is a long or FP comparison that has been folded into
    // the HCondition, generate the comparison directly.
    DataType::Type type = condition->InputAt(0)->GetType();
    if (type == DataType::Type::kInt64 || DataType::IsFloatingPointType(type)) {
      GenerateCompareTestAndBranch(condition, true_target, false_target);
      return;
    }

    Location lhs = condition->GetLocations()->InAt(0);
    Location rhs = condition->GetLocations()->InAt(1);
    codegen_->GenerateIntCompare(lhs, rhs);
    if (true_target == nullptr) {
      __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
    } else {
      __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ jmp(false_target);
  }
}
1699
void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    // A materialized condition (or plain boolean) can be tested from any
    // location; a condition emitted at use site needs no input here.
    locations->SetInAt(0, Location::Any());
  }
}
1706
1707void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001708 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1709 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1710 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1711 nullptr : codegen_->GetLabelOf(true_successor);
1712 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1713 nullptr : codegen_->GetLabelOf(false_successor);
1714 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001715}
1716
void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetAllocator())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  InvokeRuntimeCallingConvention calling_convention;
  RegisterSet caller_saves = RegisterSet::Empty();
  // The slow path only clobbers the first runtime-call argument register, so
  // restrict the caller saves to just that one.
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetCustomSlowPathCallerSaves(caller_saves);
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    // Materialized condition (or boolean) may be read from any location.
    locations->SetInAt(0, Location::Any());
  }
}
1728
void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
  // Jump to the deoptimization slow path when the condition holds; otherwise
  // fall through (null false target).
  GenerateTestAndBranch<Label>(deoptimize,
                               /* condition_input_index */ 0,
                               slow_path->GetEntryLabel(),
                               /* false_target */ nullptr);
}
1736
void LocationsBuilderX86_64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  LocationSummary* locations = new (GetGraph()->GetAllocator())
      LocationSummary(flag, LocationSummary::kNoCall);
  // The flag value is produced in a core register.
  locations->SetOut(Location::RequiresRegister());
}
1742
void InstructionCodeGeneratorX86_64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  // Load the "should deoptimize" flag from its slot in the current stack frame.
  __ movl(flag->GetLocations()->Out().AsRegister<CpuRegister>(),
          Address(CpuRegister(RSP), codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
}
1747
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001748static bool SelectCanUseCMOV(HSelect* select) {
1749 // There are no conditional move instructions for XMMs.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001750 if (DataType::IsFloatingPointType(select->GetType())) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001751 return false;
1752 }
1753
1754 // A FP condition doesn't generate the single CC that we need.
1755 HInstruction* condition = select->GetCondition();
1756 if (condition->IsCondition() &&
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001757 DataType::IsFloatingPointType(condition->InputAt(0)->GetType())) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001758 return false;
1759 }
1760
1761 // We can generate a CMOV for this Select.
1762 return true;
1763}
1764
David Brazdil74eb1b22015-12-14 11:44:01 +00001765void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001766 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001767 if (DataType::IsFloatingPointType(select->GetType())) {
David Brazdil74eb1b22015-12-14 11:44:01 +00001768 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001769 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001770 } else {
1771 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001772 if (SelectCanUseCMOV(select)) {
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001773 if (select->InputAt(1)->IsConstant()) {
1774 locations->SetInAt(1, Location::RequiresRegister());
1775 } else {
1776 locations->SetInAt(1, Location::Any());
1777 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001778 } else {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001779 locations->SetInAt(1, Location::Any());
1780 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001781 }
1782 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1783 locations->SetInAt(2, Location::RequiresRegister());
1784 }
1785 locations->SetOut(Location::SameAsFirstInput());
1786}
1787
void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  if (SelectCanUseCMOV(select)) {
    // If both the condition and the source types are integer, we can generate
    // a CMOV to implement Select.
    CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
    Location value_true_loc = locations->InAt(1);
    DCHECK(locations->InAt(0).Equals(locations->Out()));

    HInstruction* select_condition = select->GetCondition();
    Condition cond = kNotEqual;

    // Figure out how to test the 'condition'.
    if (select_condition->IsCondition()) {
      HCondition* condition = select_condition->AsCondition();
      if (!condition->IsEmittedAtUseSite()) {
        // This was a previously materialized condition.
        // Can we use the existing condition code?
        if (AreEflagsSetFrom(condition, select)) {
          // Materialization was the previous instruction. Condition codes are right.
          cond = X86_64IntegerCondition(condition->GetCondition());
        } else {
          // No, we have to recreate the condition code.
          CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
          __ testl(cond_reg, cond_reg);
        }
      } else {
        // Condition is emitted here: generate the compare and use its CC.
        GenerateCompareTest(condition);
        cond = X86_64IntegerCondition(condition->GetCondition());
      }
    } else {
      // Must be a Boolean condition, which needs to be compared to 0.
      CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
      __ testl(cond_reg, cond_reg);
    }

    // If the condition is true, overwrite the output, which already contains false.
    // Generate the correct sized CMOV.
    bool is_64_bit = DataType::Is64BitType(select->GetType());
    if (value_true_loc.IsRegister()) {
      __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
    } else {
      __ cmov(cond,
              value_false,
              Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
    }
  } else {
    // Compare-and-branch fallback: the output already holds the false value
    // (SameAsFirstInput); when the condition holds, fall through and overwrite
    // it with the true value, otherwise branch over the move.
    NearLabel false_target;
    GenerateTestAndBranch<NearLabel>(select,
                                     /* condition_input_index */ 2,
                                     /* true_target */ nullptr,
                                     &false_target);
    codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
    __ Bind(&false_target);
  }
}
1844
void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  // No operands and no output: an empty LocationSummary suffices.
  new (GetGraph()->GetAllocator()) LocationSummary(info);
}
1848
void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}
1852
void CodeGeneratorX86_64::GenerateNop() {
  // Single-byte x86 NOP, used e.g. as a native-debug-info anchor.
  __ nop();
}
1856
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001857void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001858 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01001859 new (GetGraph()->GetAllocator()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001860 // Handle the long/FP comparisons made in instruction simplification.
1861 switch (cond->InputAt(0)->GetType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001862 case DataType::Type::kInt64:
Mark Mendellc4701932015-04-10 13:18:51 -04001863 locations->SetInAt(0, Location::RequiresRegister());
1864 locations->SetInAt(1, Location::Any());
1865 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001866 case DataType::Type::kFloat32:
1867 case DataType::Type::kFloat64:
Mark Mendellc4701932015-04-10 13:18:51 -04001868 locations->SetInAt(0, Location::RequiresFpuRegister());
1869 locations->SetInAt(1, Location::Any());
1870 break;
1871 default:
1872 locations->SetInAt(0, Location::RequiresRegister());
1873 locations->SetInAt(1, Location::Any());
1874 break;
1875 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001876 if (!cond->IsEmittedAtUseSite()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001877 locations->SetOut(Location::RequiresRegister());
1878 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001879}
1880
void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
  if (cond->IsEmittedAtUseSite()) {
    // The user (branch/select) generates the comparison itself.
    return;
  }

  LocationSummary* locations = cond->GetLocations();
  Location lhs = locations->InAt(0);
  Location rhs = locations->InAt(1);
  CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
  NearLabel true_label, false_label;

  switch (cond->InputAt(0)->GetType()) {
    default:
      // Integer case.

      // Clear output register: setcc only sets the low byte.
      __ xorl(reg, reg);

      codegen_->GenerateIntCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case DataType::Type::kInt64:
      // Clear output register: setcc only sets the low byte.
      __ xorl(reg, reg);

      codegen_->GenerateLongCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case DataType::Type::kFloat32: {
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
      } else if (rhs.IsStackSlot()) {
        __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      // FP conditions need multiple jumps (NaN handling), not a single setcc.
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
    case DataType::Type::kFloat64: {
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
      } else if (rhs.IsDoubleStackSlot()) {
        __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
  }

  // Convert the jumps into the result.
  NearLabel done_label;

  // False case: result = 0.
  __ Bind(&false_label);
  __ xorl(reg, reg);
  __ jmp(&done_label);

  // True case: result = 1.
  __ Bind(&true_label);
  __ movl(reg, Immediate(1));
  __ Bind(&done_label);
}
1950
void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
  // All comparisons share the generic condition handling.
  HandleCondition(comp);
}
1954
void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
  // All comparisons share the generic condition handling.
  HandleCondition(comp);
}
1958
void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
  // All comparisons share the generic condition handling.
  HandleCondition(comp);
}
1962
void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
  // All comparisons share the generic condition handling.
  HandleCondition(comp);
}
1966
void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
  // All comparisons share the generic condition handling.
  HandleCondition(comp);
}
1970
void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
  // All comparisons share the generic condition handling.
  HandleCondition(comp);
}
1974
void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  // All comparisons share the generic condition handling.
  HandleCondition(comp);
}
1978
void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  // All comparisons share the generic condition handling.
  HandleCondition(comp);
}
1982
void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
  // All comparisons share the generic condition handling.
  HandleCondition(comp);
}
1986
void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
  // All comparisons share the generic condition handling.
  HandleCondition(comp);
}
1990
void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  // All comparisons share the generic condition handling.
  HandleCondition(comp);
}
1994
void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  // All comparisons share the generic condition handling.
  HandleCondition(comp);
}
1998
void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
  // Unsigned comparisons also share the generic condition handling.
  HandleCondition(comp);
}
2002
void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
  // Unsigned comparisons also share the generic condition handling.
  HandleCondition(comp);
}
2006
void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  // Unsigned comparisons also share the generic condition handling.
  HandleCondition(comp);
}
2010
void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  // Unsigned comparisons also share the generic condition handling.
  HandleCondition(comp);
}
2014
void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
  // Unsigned comparisons also share the generic condition handling.
  HandleCondition(comp);
}
2018
void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
  // Unsigned comparisons also share the generic condition handling.
  HandleCondition(comp);
}
2022
void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  // Unsigned comparisons also share the generic condition handling.
  HandleCondition(comp);
}
2026
void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  // Unsigned comparisons also share the generic condition handling.
  HandleCondition(comp);
}
2030
void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(compare, LocationSummary::kNoCall);
  switch (compare->InputAt(0)->GetType()) {
    // Integral operands: core register vs. anything.
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      // The output does not overlap: it is written only after both inputs are read.
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    // FP operands: XMM register vs. anything.
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
  }
}
2058
// Generates HCompare: out = -1 / 0 / 1 for less / equal / greater.
void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  NearLabel less, greater, done;
  DataType::Type type = compare->InputAt(0)->GetType();
  Condition less_cond = kLess;

  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case DataType::Type::kInt64: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case DataType::Type::kFloat32: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        float value = right.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
      } else if (right.IsStackSlot()) {
        __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      // Unordered (NaN) result follows the compare's bias.
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  //  ucomis{s,d} sets CF
      break;
    }
    case DataType::Type::kFloat64: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        double value = right.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
      } else if (right.IsDoubleStackSlot()) {
        __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  //  ucomis{s,d} sets CF
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }

  // Materialize the three-way result from the flags set above.
  __ movl(out, Immediate(0));
  __ j(kEqual, &done);
  __ j(less_cond, &less);

  __ Bind(&greater);
  __ movl(out, Immediate(1));
  __ jmp(&done);

  __ Bind(&less);
  __ movl(out, Immediate(-1));

  __ Bind(&done);
}
2128
void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  // Constants live in a constant location; users materialize them as needed.
  locations->SetOut(Location::ConstantLocation(constant));
}
2134
void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2138
void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  // Constants live in a constant location; users materialize them as needed.
  locations->SetOut(Location::ConstantLocation(constant));
}
2144
void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2148
void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  // Constants live in a constant location; users materialize them as needed.
  locations->SetOut(Location::ConstantLocation(constant));
}
2154
void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2158
void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  // Constants live in a constant location; users materialize them as needed.
  locations->SetOut(Location::ConstantLocation(constant));
}
2164
void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2168
void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  // Constants live in a constant location; users materialize them as needed.
  locations->SetOut(Location::ConstantLocation(constant));
}
2174
void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2179
void LocationsBuilderX86_64::VisitConstructorFence(HConstructorFence* constructor_fence) {
  // No operands and no output.
  constructor_fence->SetLocations(nullptr);
}
2183
void InstructionCodeGeneratorX86_64::VisitConstructorFence(
    HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
  // A constructor fence is a store-store barrier.
  codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}
2188
void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // No operands and no output.
  memory_barrier->SetLocations(nullptr);
}
2192
void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // Emit the barrier kind carried by the HIR instruction.
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
2196
void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
  // No operands and no output.
  ret->SetLocations(nullptr);
}
2200
void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  // Tear down the frame and return; there is no value to place.
  codegen_->GenerateFrameExit();
}
2204
void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(ret, LocationSummary::kNoCall);
  // Pin the return value to its ABI register: RAX for core/reference values,
  // XMM0 for floating-point values.
  switch (ret->InputAt(0)->GetType()) {
    case DataType::Type::kReference:
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
      break;

    default:
      LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
  }
}
2229
void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
  // Debug-only check that the register allocator honored the locations set up
  // in LocationsBuilderX86_64::VisitReturn (RAX / XMM0); no code is emitted
  // for the value itself, only the frame exit.
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case DataType::Type::kReference:
      case DataType::Type::kBool:
      case DataType::Type::kUint8:
      case DataType::Type::kInt8:
      case DataType::Type::kUint16:
      case DataType::Type::kInt16:
      case DataType::Type::kInt32:
      case DataType::Type::kInt64:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
        break;

      case DataType::Type::kFloat32:
      case DataType::Type::kFloat64:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
                  XMM0);
        break;

      default:
        LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
}
2256
Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(DataType::Type type) const {
  // Return-value location for the dex calling convention on x86-64:
  // integral/reference results in RAX, FP results in XMM0, void has none.
  switch (type) {
    case DataType::Type::kReference:
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      return Location::RegisterLocation(RAX);

    case DataType::Type::kVoid:
      return Location::NoLocation();

    case DataType::Type::kFloat64:
    case DataType::Type::kFloat32:
      return Location::FpuRegisterLocation(XMM0);
  }

  // No default case: falling out of the switch means an unhandled type value.
  UNREACHABLE();
}
2279
Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
  // The callee ArtMethod* is always passed in the dedicated method register.
  return Location::RegisterLocation(kMethodRegisterArgument);
}
2283
Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(DataType::Type type) {
  // Assigns the location of the next argument of type `type`: a GP register,
  // an FP register, or a stack slot once the convention's registers run out.
  // `gp_index_` / `float_index_` count register candidates consumed so far;
  // `stack_index_` advances by the argument's slot size (1 or 2 slots) for
  // every argument, so stack offsets stay consistent regardless of whether
  // earlier arguments landed in registers.
  switch (type) {
    case DataType::Type::kReference:
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      uint32_t index = gp_index_++;
      stack_index_++;  // One slot even if the argument goes in a register.
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case DataType::Type::kInt64: {
      uint32_t index = gp_index_;
      stack_index_ += 2;  // A long always accounts for two slots.
      if (index < calling_convention.GetNumberOfRegisters()) {
        gp_index_ += 1;  // One 64-bit register holds the whole long.
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        gp_index_ += 2;
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case DataType::Type::kFloat32: {
      uint32_t index = float_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case DataType::Type::kFloat64: {
      uint32_t index = float_index_++;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case DataType::Type::kVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location::NoLocation();
}
2340
void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}
2347
void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // Delegate to the shared code-generator helper, which emits the call to the
  // unresolved-invoke runtime trampoline.
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
2351
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002352void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002353 // Explicit clinit checks triggered by static invokes must have been pruned by
2354 // art::PrepareForRegisterAllocation.
2355 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002356
Mark Mendellfb8d2792015-03-31 22:16:59 -04002357 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002358 if (intrinsic.TryDispatch(invoke)) {
2359 return;
2360 }
2361
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002362 HandleInvoke(invoke);
2363}
2364
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002365static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2366 if (invoke->GetLocations()->Intrinsified()) {
2367 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2368 intrinsic.Dispatch(invoke);
2369 return true;
2370 }
2371 return false;
2372}
2373
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002374void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002375 // Explicit clinit checks triggered by static invokes must have been pruned by
2376 // art::PrepareForRegisterAllocation.
2377 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002378
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002379 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2380 return;
2381 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002382
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002383 LocationSummary* locations = invoke->GetLocations();
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002384 codegen_->GenerateStaticOrDirectCall(
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002385 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002386}
2387
void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
  // Build the standard dex calling-convention location summary for an invoke,
  // shared by all invoke flavors on this backend.
  InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}
2392
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002393void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Mark Mendellfb8d2792015-03-31 22:16:59 -04002394 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002395 if (intrinsic.TryDispatch(invoke)) {
2396 return;
2397 }
2398
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002399 HandleInvoke(invoke);
2400}
2401
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002402void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002403 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2404 return;
2405 }
2406
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002407 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002408 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002409}
2410
void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument: the code generator materializes the interface
  // method's dex method index in RAX before the call.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
}
2416
void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // Dispatch through the Interface Method Table (IMT) hanging off the
  // receiver's class.
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  Location receiver = locations->InAt(0);
  size_t class_offset = mirror::Object::ClassOffset().SizeValue();

  // Set the hidden argument. This is safe to do this here, as RAX
  // won't be modified thereafter, before the `call` instruction.
  DCHECK_EQ(RAX, hidden_reg.AsRegister());
  codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());

  if (receiver.IsStackSlot()) {
    __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
    // /* HeapReference<Class> */ temp = temp->klass_
    __ movl(temp, Address(temp, class_offset));
  } else {
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
  }
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetAddressOfIMT()
  __ movq(temp,
      Address(temp, mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
  // Compute the byte offset of this interface method's slot within the IMT.
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kX86_64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(
      temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize).SizeValue()));

  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
2462
void LocationsBuilderX86_64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  // Polymorphic invokes use the generic invoke location summary.
  HandleInvoke(invoke);
}
2466
void InstructionCodeGeneratorX86_64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  // Delegate to the shared code-generator helper for polymorphic calls.
  codegen_->GenerateInvokePolymorphicCall(invoke);
}
2470
void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      // Integer negation is done in place (negl/negq), so the output must
      // alias the input register.
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::SameAsFirstInput());
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      // FP negation XORs the value with a sign-bit mask loaded into an FP temp.
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::SameAsFirstInput());
      locations->AddTemp(Location::RequiresFpuRegister());
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
2492
void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
  // Emits arithmetic negation. Integers use negl/negq in place; floats flip
  // the sign bit with an XOR against a mask held in the FP temp register.
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case DataType::Type::kInt32:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));
      __ negl(out.AsRegister<CpuRegister>());
      break;

    case DataType::Type::kInt64:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));
      __ negq(out.AsRegister<CpuRegister>());
      break;

    case DataType::Type::kFloat32: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement float negation with an exclusive or with value
      // 0x80000000 (mask for bit 31, representing the sign of a
      // single-precision floating-point number).
      __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
      __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    case DataType::Type::kFloat64: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement double negation with an exclusive or with value
      // 0x8000000000000000 (mask for bit 63, representing the sign of
      // a double-precision floating-point number).
      __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
      __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
2536
void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
  // Sets up operand locations for every supported primitive conversion,
  // dispatching first on the result type and then on the input type.
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(conversion, LocationSummary::kNoCall);
  DataType::Type result_type = conversion->GetResultType();
  DataType::Type input_type = conversion->GetInputType();
  // Implicit (no-op) conversions must have been removed before code generation.
  DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
      << input_type << " -> " << result_type;

  switch (result_type) {
    // Narrowing to a sub-word integer: the movzx/movsx forms accept a
    // register, a stack slot, or a constant, so any input location works.
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
      DCHECK(DataType::IsIntegralType(input_type)) << input_type;
      locations->SetInAt(0, Location::Any());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case DataType::Type::kInt32:
      switch (input_type) {
        case DataType::Type::kInt64:
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        // float/double -> int uses cvttss2si/cvttsd2si and needs the input in
        // an FP register.
        case DataType::Type::kFloat32:
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        case DataType::Type::kFloat64:
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case DataType::Type::kInt64:
      switch (input_type) {
        case DataType::Type::kBool:
        case DataType::Type::kUint8:
        case DataType::Type::kInt8:
        case DataType::Type::kUint16:
        case DataType::Type::kInt16:
        case DataType::Type::kInt32:
          // TODO: We would benefit from a (to-be-implemented)
          // Location::RegisterOrStackSlot requirement for this input.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        case DataType::Type::kFloat32:
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        case DataType::Type::kFloat64:
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case DataType::Type::kFloat32:
      switch (input_type) {
        case DataType::Type::kBool:
        case DataType::Type::kUint8:
        case DataType::Type::kInt8:
        case DataType::Type::kUint16:
        case DataType::Type::kInt16:
        case DataType::Type::kInt32:
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case DataType::Type::kInt64:
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case DataType::Type::kFloat64:
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case DataType::Type::kFloat64:
      switch (input_type) {
        case DataType::Type::kBool:
        case DataType::Type::kUint8:
        case DataType::Type::kInt8:
        case DataType::Type::kUint16:
        case DataType::Type::kInt16:
        case DataType::Type::kInt32:
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case DataType::Type::kInt64:
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case DataType::Type::kFloat32:
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
2669
2670void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2671 LocationSummary* locations = conversion->GetLocations();
2672 Location out = locations->Out();
2673 Location in = locations->InAt(0);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002674 DataType::Type result_type = conversion->GetResultType();
2675 DataType::Type input_type = conversion->GetInputType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002676 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
2677 << input_type << " -> " << result_type;
Roland Levillaindff1f282014-11-05 14:15:05 +00002678 switch (result_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002679 case DataType::Type::kUint8:
Roland Levillain51d3fc42014-11-13 14:11:42 +00002680 switch (input_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002681 case DataType::Type::kInt8:
2682 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002683 case DataType::Type::kInt16:
2684 case DataType::Type::kInt32:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002685 case DataType::Type::kInt64:
2686 if (in.IsRegister()) {
2687 __ movzxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
2688 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
2689 __ movzxb(out.AsRegister<CpuRegister>(),
2690 Address(CpuRegister(RSP), in.GetStackIndex()));
2691 } else {
2692 __ movl(out.AsRegister<CpuRegister>(),
2693 Immediate(static_cast<uint8_t>(Int64FromConstant(in.GetConstant()))));
2694 }
2695 break;
2696
2697 default:
2698 LOG(FATAL) << "Unexpected type conversion from " << input_type
2699 << " to " << result_type;
2700 }
2701 break;
2702
2703 case DataType::Type::kInt8:
2704 switch (input_type) {
2705 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002706 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002707 case DataType::Type::kInt16:
2708 case DataType::Type::kInt32:
2709 case DataType::Type::kInt64:
Roland Levillain51d3fc42014-11-13 14:11:42 +00002710 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002711 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002712 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002713 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002714 Address(CpuRegister(RSP), in.GetStackIndex()));
2715 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002716 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002717 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain51d3fc42014-11-13 14:11:42 +00002718 }
2719 break;
2720
2721 default:
2722 LOG(FATAL) << "Unexpected type conversion from " << input_type
2723 << " to " << result_type;
2724 }
2725 break;
2726
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002727 case DataType::Type::kUint16:
2728 switch (input_type) {
2729 case DataType::Type::kInt8:
2730 case DataType::Type::kInt16:
2731 case DataType::Type::kInt32:
2732 case DataType::Type::kInt64:
2733 if (in.IsRegister()) {
2734 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
2735 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
2736 __ movzxw(out.AsRegister<CpuRegister>(),
2737 Address(CpuRegister(RSP), in.GetStackIndex()));
2738 } else {
2739 __ movl(out.AsRegister<CpuRegister>(),
2740 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
2741 }
2742 break;
2743
2744 default:
2745 LOG(FATAL) << "Unexpected type conversion from " << input_type
2746 << " to " << result_type;
2747 }
2748 break;
2749
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002750 case DataType::Type::kInt16:
Roland Levillain01a8d712014-11-14 16:27:39 +00002751 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002752 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002753 case DataType::Type::kInt32:
2754 case DataType::Type::kInt64:
Roland Levillain01a8d712014-11-14 16:27:39 +00002755 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002756 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002757 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002758 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002759 Address(CpuRegister(RSP), in.GetStackIndex()));
2760 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002761 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002762 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain01a8d712014-11-14 16:27:39 +00002763 }
2764 break;
2765
2766 default:
2767 LOG(FATAL) << "Unexpected type conversion from " << input_type
2768 << " to " << result_type;
2769 }
2770 break;
2771
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002772 case DataType::Type::kInt32:
Roland Levillain946e1432014-11-11 17:35:19 +00002773 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002774 case DataType::Type::kInt64:
Roland Levillain946e1432014-11-11 17:35:19 +00002775 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002776 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00002777 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002778 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00002779 Address(CpuRegister(RSP), in.GetStackIndex()));
2780 } else {
2781 DCHECK(in.IsConstant());
2782 DCHECK(in.GetConstant()->IsLongConstant());
2783 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002784 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002785 }
2786 break;
2787
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002788 case DataType::Type::kFloat32: {
Roland Levillain3f8f9362014-12-02 17:45:01 +00002789 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2790 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002791 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002792
2793 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002794 // if input >= (float)INT_MAX goto done
2795 __ comiss(input, codegen_->LiteralFloatAddress(kPrimIntMax));
Roland Levillain3f8f9362014-12-02 17:45:01 +00002796 __ j(kAboveEqual, &done);
2797 // if input == NaN goto nan
2798 __ j(kUnordered, &nan);
2799 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002800 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00002801 __ jmp(&done);
2802 __ Bind(&nan);
2803 // output = 0
2804 __ xorl(output, output);
2805 __ Bind(&done);
2806 break;
2807 }
2808
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002809 case DataType::Type::kFloat64: {
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002810 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2811 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002812 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002813
2814 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002815 // if input >= (double)INT_MAX goto done
2816 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002817 __ j(kAboveEqual, &done);
2818 // if input == NaN goto nan
2819 __ j(kUnordered, &nan);
2820 // output = double-to-int-truncate(input)
2821 __ cvttsd2si(output, input);
2822 __ jmp(&done);
2823 __ Bind(&nan);
2824 // output = 0
2825 __ xorl(output, output);
2826 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002827 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002828 }
Roland Levillain946e1432014-11-11 17:35:19 +00002829
2830 default:
2831 LOG(FATAL) << "Unexpected type conversion from " << input_type
2832 << " to " << result_type;
2833 }
2834 break;
2835
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002836 case DataType::Type::kInt64:
Roland Levillaindff1f282014-11-05 14:15:05 +00002837 switch (input_type) {
2838 DCHECK(out.IsRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002839 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002840 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002841 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002842 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002843 case DataType::Type::kInt16:
2844 case DataType::Type::kInt32:
Roland Levillaindff1f282014-11-05 14:15:05 +00002845 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002846 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00002847 break;
2848
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002849 case DataType::Type::kFloat32: {
Roland Levillain624279f2014-12-04 11:54:28 +00002850 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2851 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002852 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00002853
Mark Mendell92e83bf2015-05-07 11:25:03 -04002854 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002855 // if input >= (float)LONG_MAX goto done
2856 __ comiss(input, codegen_->LiteralFloatAddress(kPrimLongMax));
Roland Levillain624279f2014-12-04 11:54:28 +00002857 __ j(kAboveEqual, &done);
2858 // if input == NaN goto nan
2859 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002860 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002861 __ cvttss2si(output, input, true);
2862 __ jmp(&done);
2863 __ Bind(&nan);
2864 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002865 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00002866 __ Bind(&done);
2867 break;
2868 }
2869
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002870 case DataType::Type::kFloat64: {
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002871 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2872 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002873 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002874
Mark Mendell92e83bf2015-05-07 11:25:03 -04002875 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002876 // if input >= (double)LONG_MAX goto done
2877 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimLongMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002878 __ j(kAboveEqual, &done);
2879 // if input == NaN goto nan
2880 __ j(kUnordered, &nan);
2881 // output = double-to-long-truncate(input)
2882 __ cvttsd2si(output, input, true);
2883 __ jmp(&done);
2884 __ Bind(&nan);
2885 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002886 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002887 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00002888 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002889 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002890
2891 default:
2892 LOG(FATAL) << "Unexpected type conversion from " << input_type
2893 << " to " << result_type;
2894 }
2895 break;
2896
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002897 case DataType::Type::kFloat32:
Roland Levillaincff13742014-11-17 14:32:17 +00002898 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002899 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002900 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002901 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002902 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002903 case DataType::Type::kInt16:
2904 case DataType::Type::kInt32:
Mark Mendell40741f32015-04-20 22:10:34 -04002905 if (in.IsRegister()) {
2906 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2907 } else if (in.IsConstant()) {
2908 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2909 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002910 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002911 } else {
2912 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2913 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2914 }
Roland Levillaincff13742014-11-17 14:32:17 +00002915 break;
2916
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002917 case DataType::Type::kInt64:
Mark Mendell40741f32015-04-20 22:10:34 -04002918 if (in.IsRegister()) {
2919 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2920 } else if (in.IsConstant()) {
2921 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2922 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Pavel Vyssotski4c858cd2016-03-16 13:59:53 +06002923 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002924 } else {
2925 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2926 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2927 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002928 break;
2929
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002930 case DataType::Type::kFloat64:
Mark Mendell40741f32015-04-20 22:10:34 -04002931 if (in.IsFpuRegister()) {
2932 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2933 } else if (in.IsConstant()) {
2934 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
2935 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002936 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002937 } else {
2938 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
2939 Address(CpuRegister(RSP), in.GetStackIndex()));
2940 }
Roland Levillaincff13742014-11-17 14:32:17 +00002941 break;
2942
2943 default:
2944 LOG(FATAL) << "Unexpected type conversion from " << input_type
2945 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08002946 }
Roland Levillaincff13742014-11-17 14:32:17 +00002947 break;
2948
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002949 case DataType::Type::kFloat64:
Roland Levillaincff13742014-11-17 14:32:17 +00002950 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002951 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002952 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002953 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002954 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002955 case DataType::Type::kInt16:
2956 case DataType::Type::kInt32:
Mark Mendell40741f32015-04-20 22:10:34 -04002957 if (in.IsRegister()) {
2958 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2959 } else if (in.IsConstant()) {
2960 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2961 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002962 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002963 } else {
2964 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2965 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2966 }
Roland Levillaincff13742014-11-17 14:32:17 +00002967 break;
2968
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002969 case DataType::Type::kInt64:
Mark Mendell40741f32015-04-20 22:10:34 -04002970 if (in.IsRegister()) {
2971 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2972 } else if (in.IsConstant()) {
2973 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2974 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002975 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002976 } else {
2977 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2978 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2979 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00002980 break;
2981
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002982 case DataType::Type::kFloat32:
Mark Mendell40741f32015-04-20 22:10:34 -04002983 if (in.IsFpuRegister()) {
2984 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2985 } else if (in.IsConstant()) {
2986 float v = in.GetConstant()->AsFloatConstant()->GetValue();
2987 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002988 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002989 } else {
2990 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
2991 Address(CpuRegister(RSP), in.GetStackIndex()));
2992 }
Roland Levillaincff13742014-11-17 14:32:17 +00002993 break;
2994
2995 default:
2996 LOG(FATAL) << "Unexpected type conversion from " << input_type
2997 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08002998 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002999 break;
3000
3001 default:
3002 LOG(FATAL) << "Unexpected type conversion from " << input_type
3003 << " to " << result_type;
3004 }
3005}
3006
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003007void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003008 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003009 new (GetGraph()->GetAllocator()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003010 switch (add->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003011 case DataType::Type::kInt32: {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003012 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003013 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
3014 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003015 break;
3016 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003017
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003018 case DataType::Type::kInt64: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003019 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05003020 // We can use a leaq or addq if the constant can fit in an immediate.
Mark Mendellea5af682015-10-22 17:35:49 -04003021 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05003022 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003023 break;
3024 }
3025
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003026 case DataType::Type::kFloat64:
3027 case DataType::Type::kFloat32: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003028 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003029 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003030 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003031 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003032 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003033
3034 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003035 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003036 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003037}
3038
3039void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
3040 LocationSummary* locations = add->GetLocations();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003041 Location first = locations->InAt(0);
3042 Location second = locations->InAt(1);
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003043 Location out = locations->Out();
Calin Juravle11351682014-10-23 15:38:15 +01003044
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003045 switch (add->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003046 case DataType::Type::kInt32: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003047 if (second.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003048 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
3049 __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04003050 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
3051 __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003052 } else {
3053 __ leal(out.AsRegister<CpuRegister>(), Address(
3054 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
3055 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003056 } else if (second.IsConstant()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003057 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
3058 __ addl(out.AsRegister<CpuRegister>(),
3059 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
3060 } else {
3061 __ leal(out.AsRegister<CpuRegister>(), Address(
3062 first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
3063 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003064 } else {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003065 DCHECK(first.Equals(locations->Out()));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003066 __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003067 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003068 break;
3069 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003070
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003071 case DataType::Type::kInt64: {
Mark Mendell09b84632015-02-13 17:48:38 -05003072 if (second.IsRegister()) {
3073 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
3074 __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04003075 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
3076 __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Mark Mendell09b84632015-02-13 17:48:38 -05003077 } else {
3078 __ leaq(out.AsRegister<CpuRegister>(), Address(
3079 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
3080 }
3081 } else {
3082 DCHECK(second.IsConstant());
3083 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3084 int32_t int32_value = Low32Bits(value);
3085 DCHECK_EQ(int32_value, value);
3086 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
3087 __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
3088 } else {
3089 __ leaq(out.AsRegister<CpuRegister>(), Address(
3090 first.AsRegister<CpuRegister>(), int32_value));
3091 }
3092 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003093 break;
3094 }
3095
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003096 case DataType::Type::kFloat32: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003097 if (second.IsFpuRegister()) {
3098 __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3099 } else if (second.IsConstant()) {
3100 __ addss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003101 codegen_->LiteralFloatAddress(
3102 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003103 } else {
3104 DCHECK(second.IsStackSlot());
3105 __ addss(first.AsFpuRegister<XmmRegister>(),
3106 Address(CpuRegister(RSP), second.GetStackIndex()));
3107 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003108 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003109 }
3110
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003111 case DataType::Type::kFloat64: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003112 if (second.IsFpuRegister()) {
3113 __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3114 } else if (second.IsConstant()) {
3115 __ addsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003116 codegen_->LiteralDoubleAddress(
3117 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003118 } else {
3119 DCHECK(second.IsDoubleStackSlot());
3120 __ addsd(first.AsFpuRegister<XmmRegister>(),
3121 Address(CpuRegister(RSP), second.GetStackIndex()));
3122 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003123 break;
3124 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003125
3126 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003127 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003128 }
3129}
3130
3131void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003132 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003133 new (GetGraph()->GetAllocator()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003134 switch (sub->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003135 case DataType::Type::kInt32: {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003136 locations->SetInAt(0, Location::RequiresRegister());
3137 locations->SetInAt(1, Location::Any());
3138 locations->SetOut(Location::SameAsFirstInput());
3139 break;
3140 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003141 case DataType::Type::kInt64: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003142 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04003143 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003144 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003145 break;
3146 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003147 case DataType::Type::kFloat32:
3148 case DataType::Type::kFloat64: {
Calin Juravle11351682014-10-23 15:38:15 +01003149 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003150 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003151 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003152 break;
Calin Juravle11351682014-10-23 15:38:15 +01003153 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003154 default:
Calin Juravle11351682014-10-23 15:38:15 +01003155 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003156 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003157}
3158
3159void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
3160 LocationSummary* locations = sub->GetLocations();
Calin Juravle11351682014-10-23 15:38:15 +01003161 Location first = locations->InAt(0);
3162 Location second = locations->InAt(1);
3163 DCHECK(first.Equals(locations->Out()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003164 switch (sub->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003165 case DataType::Type::kInt32: {
Calin Juravle11351682014-10-23 15:38:15 +01003166 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003167 __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle11351682014-10-23 15:38:15 +01003168 } else if (second.IsConstant()) {
3169 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
Roland Levillain271ab9c2014-11-27 15:23:57 +00003170 __ subl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003171 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003172 __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003173 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003174 break;
3175 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003176 case DataType::Type::kInt64: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003177 if (second.IsConstant()) {
3178 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3179 DCHECK(IsInt<32>(value));
3180 __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
3181 } else {
3182 __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
3183 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003184 break;
3185 }
3186
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003187 case DataType::Type::kFloat32: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003188 if (second.IsFpuRegister()) {
3189 __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3190 } else if (second.IsConstant()) {
3191 __ subss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003192 codegen_->LiteralFloatAddress(
3193 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003194 } else {
3195 DCHECK(second.IsStackSlot());
3196 __ subss(first.AsFpuRegister<XmmRegister>(),
3197 Address(CpuRegister(RSP), second.GetStackIndex()));
3198 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003199 break;
Calin Juravle11351682014-10-23 15:38:15 +01003200 }
3201
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003202 case DataType::Type::kFloat64: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003203 if (second.IsFpuRegister()) {
3204 __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3205 } else if (second.IsConstant()) {
3206 __ subsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003207 codegen_->LiteralDoubleAddress(
3208 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003209 } else {
3210 DCHECK(second.IsDoubleStackSlot());
3211 __ subsd(first.AsFpuRegister<XmmRegister>(),
3212 Address(CpuRegister(RSP), second.GetStackIndex()));
3213 }
Calin Juravle11351682014-10-23 15:38:15 +01003214 break;
3215 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003216
3217 default:
Calin Juravle11351682014-10-23 15:38:15 +01003218 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003219 }
3220}
3221
Calin Juravle34bacdf2014-10-07 20:23:36 +01003222void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3223 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003224 new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
Calin Juravle34bacdf2014-10-07 20:23:36 +01003225 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003226 case DataType::Type::kInt32: {
Calin Juravle34bacdf2014-10-07 20:23:36 +01003227 locations->SetInAt(0, Location::RequiresRegister());
3228 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003229 if (mul->InputAt(1)->IsIntConstant()) {
3230 // Can use 3 operand multiply.
3231 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3232 } else {
3233 locations->SetOut(Location::SameAsFirstInput());
3234 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003235 break;
3236 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003237 case DataType::Type::kInt64: {
Calin Juravle34bacdf2014-10-07 20:23:36 +01003238 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003239 locations->SetInAt(1, Location::Any());
3240 if (mul->InputAt(1)->IsLongConstant() &&
3241 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003242 // Can use 3 operand multiply.
3243 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3244 } else {
3245 locations->SetOut(Location::SameAsFirstInput());
3246 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003247 break;
3248 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003249 case DataType::Type::kFloat32:
3250 case DataType::Type::kFloat64: {
Calin Juravleb5bfa962014-10-21 18:02:24 +01003251 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003252 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003253 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003254 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003255 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003256
3257 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003258 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003259 }
3260}
3261
3262void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
3263 LocationSummary* locations = mul->GetLocations();
3264 Location first = locations->InAt(0);
3265 Location second = locations->InAt(1);
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003266 Location out = locations->Out();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003267 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003268 case DataType::Type::kInt32:
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003269 // The constant may have ended up in a register, so test explicitly to avoid
3270 // problems where the output may not be the same as the first operand.
3271 if (mul->InputAt(1)->IsIntConstant()) {
3272 Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
3273 __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
3274 } else if (second.IsRegister()) {
3275 DCHECK(first.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003276 __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003277 } else {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003278 DCHECK(first.Equals(out));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003279 DCHECK(second.IsStackSlot());
Roland Levillain199f3362014-11-27 17:15:16 +00003280 __ imull(first.AsRegister<CpuRegister>(),
3281 Address(CpuRegister(RSP), second.GetStackIndex()));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003282 }
3283 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003284 case DataType::Type::kInt64: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003285 // The constant may have ended up in a register, so test explicitly to avoid
3286 // problems where the output may not be the same as the first operand.
3287 if (mul->InputAt(1)->IsLongConstant()) {
3288 int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
3289 if (IsInt<32>(value)) {
3290 __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
3291 Immediate(static_cast<int32_t>(value)));
3292 } else {
3293 // Have to use the constant area.
3294 DCHECK(first.Equals(out));
3295 __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
3296 }
3297 } else if (second.IsRegister()) {
3298 DCHECK(first.Equals(out));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003299 __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003300 } else {
3301 DCHECK(second.IsDoubleStackSlot());
3302 DCHECK(first.Equals(out));
3303 __ imulq(first.AsRegister<CpuRegister>(),
3304 Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003305 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003306 break;
3307 }
3308
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003309 case DataType::Type::kFloat32: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003310 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003311 if (second.IsFpuRegister()) {
3312 __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3313 } else if (second.IsConstant()) {
3314 __ mulss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003315 codegen_->LiteralFloatAddress(
3316 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003317 } else {
3318 DCHECK(second.IsStackSlot());
3319 __ mulss(first.AsFpuRegister<XmmRegister>(),
3320 Address(CpuRegister(RSP), second.GetStackIndex()));
3321 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003322 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003323 }
3324
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003325 case DataType::Type::kFloat64: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003326 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003327 if (second.IsFpuRegister()) {
3328 __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3329 } else if (second.IsConstant()) {
3330 __ mulsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003331 codegen_->LiteralDoubleAddress(
3332 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003333 } else {
3334 DCHECK(second.IsDoubleStackSlot());
3335 __ mulsd(first.AsFpuRegister<XmmRegister>(),
3336 Address(CpuRegister(RSP), second.GetStackIndex()));
3337 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003338 break;
3339 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003340
3341 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003342 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003343 }
3344}
3345
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003346void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3347 uint32_t stack_adjustment, bool is_float) {
3348 if (source.IsStackSlot()) {
3349 DCHECK(is_float);
3350 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3351 } else if (source.IsDoubleStackSlot()) {
3352 DCHECK(!is_float);
3353 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3354 } else {
3355 // Write the value to the temporary location on the stack and load to FP stack.
3356 if (is_float) {
3357 Location stack_temp = Location::StackSlot(temp_offset);
3358 codegen_->Move(stack_temp, source);
3359 __ flds(Address(CpuRegister(RSP), temp_offset));
3360 } else {
3361 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3362 codegen_->Move(stack_temp, source);
3363 __ fldl(Address(CpuRegister(RSP), temp_offset));
3364 }
3365 }
3366}
3367
void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
  // Computes a floating-point remainder with the x87 fprem instruction,
  // staging operands through a small scratch area carved out below RSP.
  DataType::Type type = rem->GetResultType();
  bool is_float = type == DataType::Type::kFloat32;
  size_t elem_size = DataType::Size(type);
  LocationSummary* locations = rem->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  // Create stack space for 2 elements.
  // TODO: enhance register allocator to ask for stack temporaries.
  __ subq(CpuRegister(RSP), Immediate(2 * elem_size));

  // Load the values to the FP stack in reverse order, using temporaries if needed.
  // fprem computes ST(0) % ST(1), so the divisor must be pushed first.
  PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
  PushOntoFPStack(first, 0, 2 * elem_size, is_float);

  // Loop doing FPREM until we stabilize. fprem may perform only partial
  // argument reduction per iteration for large exponent differences.
  NearLabel retry;
  __ Bind(&retry);
  __ fprem();

  // Move FP status to AX. (This is why VisitRem reserves RAX as a temp.)
  __ fstsw();

  // And see if the argument reduction is complete. This is signaled by the
  // C2 FPU flag bit set to 0.
  __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
  __ j(kNotEqual, &retry);

  // We have settled on the final value. Retrieve it into an XMM register.
  // Store FP top of stack to real stack.
  if (is_float) {
    __ fsts(Address(CpuRegister(RSP), 0));
  } else {
    __ fstl(Address(CpuRegister(RSP), 0));
  }

  // Pop the 2 items from the FP stack.
  __ fucompp();

  // Load the value from the stack into an XMM register.
  DCHECK(out.IsFpuRegister()) << out;
  if (is_float) {
    __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  } else {
    __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  }

  // And remove the temporary stack space we allocated.
  __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
}
3420
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003421void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3422 DCHECK(instruction->IsDiv() || instruction->IsRem());
3423
3424 LocationSummary* locations = instruction->GetLocations();
3425 Location second = locations->InAt(1);
3426 DCHECK(second.IsConstant());
3427
3428 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3429 CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003430 int64_t imm = Int64FromConstant(second.GetConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003431
3432 DCHECK(imm == 1 || imm == -1);
3433
3434 switch (instruction->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003435 case DataType::Type::kInt32: {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003436 if (instruction->IsRem()) {
3437 __ xorl(output_register, output_register);
3438 } else {
3439 __ movl(output_register, input_register);
3440 if (imm == -1) {
3441 __ negl(output_register);
3442 }
3443 }
3444 break;
3445 }
3446
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003447 case DataType::Type::kInt64: {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003448 if (instruction->IsRem()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003449 __ xorl(output_register, output_register);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003450 } else {
3451 __ movq(output_register, input_register);
3452 if (imm == -1) {
3453 __ negq(output_register);
3454 }
3455 }
3456 break;
3457 }
3458
3459 default:
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003460 LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003461 }
3462}
3463
void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
  // Strength-reduces a division by +/-2^k to an arithmetic shift. sar rounds
  // toward negative infinity, so (abs_imm - 1) is first added to negative
  // numerators (selected via cmov) to make the result round toward zero,
  // matching idiv semantics.
  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
  CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();

  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
  uint64_t abs_imm = AbsOrMin(imm);

  CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();

  if (instruction->GetResultType() == DataType::Type::kInt32) {
    // tmp = numerator + (abs_imm - 1); only used when the numerator is negative.
    __ leal(tmp, Address(numerator, abs_imm - 1));
    __ testl(numerator, numerator);
    __ cmov(kGreaterEqual, tmp, numerator);
    int shift = CTZ(imm);
    __ sarl(tmp, Immediate(shift));

    if (imm < 0) {
      __ negl(tmp);
    }

    __ movl(output_register, tmp);
  } else {
    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
    CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();

    // For 64-bit divisors (abs_imm - 1) may not fit in a lea displacement,
    // so materialize the bias in rdx and add.
    codegen_->Load64BitValue(rdx, abs_imm - 1);
    __ addq(rdx, numerator);
    __ testq(numerator, numerator);
    __ cmov(kGreaterEqual, rdx, numerator);
    int shift = CTZ(imm);
    __ sarq(rdx, Immediate(shift));

    if (imm < 0) {
      __ negq(rdx);
    }

    __ movq(output_register, rdx);
  }
}
3507
void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  // Generates a division/remainder by an arbitrary non-trivial constant using
  // the multiply-by-magic-number technique: the quotient is derived from the
  // high half of `numerator * magic` (see CalculateMagicAndShiftForDivRem).
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  // Register roles follow the locations built in VisitDiv/VisitRem: the
  // dividend arrives in RAX, RDX is clobbered by imul, the output is RAX for
  // div and RDX for rem, and `numerator` is the extra temp that keeps the
  // original dividend alive across the sequence.
  CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
                                               : locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
                                         : locations->Out().AsRegister<CpuRegister>();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();

  DCHECK_EQ(RAX, eax.AsRegister());
  DCHECK_EQ(RDX, edx.AsRegister());
  if (instruction->IsDiv()) {
    DCHECK_EQ(RAX, out.AsRegister());
  } else {
    DCHECK_EQ(RDX, out.AsRegister());
  }

  int64_t magic;
  int shift;

  // TODO: can these branches be written as one?
  if (instruction->GetResultType() == DataType::Type::kInt32) {
    int imm = second.GetConstant()->AsIntConstant()->GetValue();

    CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);

    // Save the numerator before EAX/EDX are clobbered.
    __ movl(numerator, eax);

    // EDX:EAX = magic * numerator.
    __ movl(eax, Immediate(magic));
    __ imull(numerator);

    // Correction when the signs of imm and magic differ.
    if (imm > 0 && magic < 0) {
      __ addl(edx, numerator);
    } else if (imm < 0 && magic > 0) {
      __ subl(edx, numerator);
    }

    if (shift != 0) {
      __ sarl(edx, Immediate(shift));
    }

    // EDX += 1 if EDX < 0 (add the sign bit to round toward zero).
    __ movl(eax, edx);
    __ shrl(edx, Immediate(31));
    __ addl(edx, eax);

    if (instruction->IsRem()) {
      // remainder = numerator - quotient * imm; result is expected in EDX.
      __ movl(eax, numerator);
      __ imull(edx, Immediate(imm));
      __ subl(eax, edx);
      __ movl(edx, eax);
    } else {
      __ movl(eax, edx);
    }
  } else {
    int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();

    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);

    CpuRegister rax = eax;
    CpuRegister rdx = edx;

    CalculateMagicAndShiftForDivRem(imm, true /* is_long */, &magic, &shift);

    // Save the numerator.
    __ movq(numerator, rax);

    // RAX = magic
    codegen_->Load64BitValue(rax, magic);

    // RDX:RAX = magic * numerator
    __ imulq(numerator);

    if (imm > 0 && magic < 0) {
      // RDX += numerator
      __ addq(rdx, numerator);
    } else if (imm < 0 && magic > 0) {
      // RDX -= numerator
      __ subq(rdx, numerator);
    }

    // Shift if needed.
    if (shift != 0) {
      __ sarq(rdx, Immediate(shift));
    }

    // RDX += 1 if RDX < 0
    __ movq(rax, rdx);
    __ shrq(rdx, Immediate(63));
    __ addq(rdx, rax);

    if (instruction->IsRem()) {
      // remainder = numerator - quotient * imm; result is expected in RDX.
      __ movq(rax, numerator);

      if (IsInt<32>(imm)) {
        __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
      } else {
        // The immediate does not fit in 32 bits; multiply via the constant area.
        __ imulq(rdx, codegen_->LiteralInt64Address(imm));
      }

      __ subq(rax, rdx);
      __ movq(rdx, rax);
    } else {
      __ movq(rax, rdx);
    }
  }
}
3618
void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  // Common code generation for integral HDiv and HRem. Constant divisors are
  // strength-reduced; otherwise an idiv is emitted with a slow path for the
  // divisor -1, where idiv would fault on the most negative dividend.
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DataType::Type type = instruction->GetResultType();
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);

  bool is_div = instruction->IsDiv();
  LocationSummary* locations = instruction->GetLocations();

  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  // The locations builders fix the dividend in RAX; div outputs in RAX,
  // rem outputs in RDX.
  DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
  DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());

  if (second.IsConstant()) {
    int64_t imm = Int64FromConstant(second.GetConstant());

    if (imm == 0) {
      // Do not generate anything. DivZeroCheck would prevent any code to be executed.
    } else if (imm == 1 || imm == -1) {
      DivRemOneOrMinusOne(instruction);
    } else if (instruction->IsDiv() && IsPowerOfTwo(AbsOrMin(imm))) {
      DivByPowerOfTwo(instruction->AsDiv());
    } else {
      DCHECK(imm <= -2 || imm >= 2);
      GenerateDivRemWithAnyConstant(instruction);
    }
  } else {
    SlowPathCode* slow_path =
        new (codegen_->GetScopedAllocator()) DivRemMinusOneSlowPathX86_64(
            instruction, out.AsRegister(), type, is_div);
    codegen_->AddSlowPath(slow_path);

    CpuRegister second_reg = second.AsRegister<CpuRegister>();
    // 0x80000000(00000000)/-1 triggers an arithmetic exception!
    // Dividing by -1 is actually negation and -0x800000000(00000000) = 0x80000000(00000000)
    // so it's safe to just use negl instead of more complex comparisons.
    if (type == DataType::Type::kInt32) {
      __ cmpl(second_reg, Immediate(-1));
      __ j(kEqual, slow_path->GetEntryLabel());
      // edx:eax <- sign-extended of eax
      __ cdq();
      // eax = quotient, edx = remainder
      __ idivl(second_reg);
    } else {
      __ cmpq(second_reg, Immediate(-1));
      __ j(kEqual, slow_path->GetEntryLabel());
      // rdx:rax <- sign-extended of rax
      __ cqo();
      // rax = quotient, rdx = remainder
      __ idivq(second_reg);
    }
    __ Bind(slow_path->GetExitLabel());
  }
}
3674
Calin Juravle7c4954d2014-10-28 16:57:40 +00003675void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
3676 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003677 new (GetGraph()->GetAllocator()) LocationSummary(div, LocationSummary::kNoCall);
Calin Juravle7c4954d2014-10-28 16:57:40 +00003678 switch (div->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003679 case DataType::Type::kInt32:
3680 case DataType::Type::kInt64: {
Calin Juravled0d48522014-11-04 16:40:20 +00003681 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003682 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003683 locations->SetOut(Location::SameAsFirstInput());
3684 // Intel uses edx:eax as the dividend.
3685 locations->AddTemp(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003686 // We need to save the numerator while we tweak rax and rdx. As we are using imul in a way
3687 // which enforces results to be in RAX and RDX, things are simpler if we use RDX also as
3688 // output and request another temp.
3689 if (div->InputAt(1)->IsConstant()) {
3690 locations->AddTemp(Location::RequiresRegister());
3691 }
Calin Juravled0d48522014-11-04 16:40:20 +00003692 break;
3693 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003694
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003695 case DataType::Type::kFloat32:
3696 case DataType::Type::kFloat64: {
Calin Juravle7c4954d2014-10-28 16:57:40 +00003697 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003698 locations->SetInAt(1, Location::Any());
Calin Juravle7c4954d2014-10-28 16:57:40 +00003699 locations->SetOut(Location::SameAsFirstInput());
3700 break;
3701 }
3702
3703 default:
3704 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3705 }
3706}
3707
void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
  // Integral division is delegated to GenerateDivRemIntegral; floating-point
  // division is emitted inline, with the destination register doubling as the
  // first operand (checked by the DCHECK below).
  LocationSummary* locations = div->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));

  DataType::Type type = div->GetResultType();
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GenerateDivRemIntegral(div);
      break;
    }

    case DataType::Type::kFloat32: {
      // The divisor may be an XMM register, a constant-area literal, or a
      // stack slot.
      if (second.IsFpuRegister()) {
        __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat64: {
      if (second.IsFpuRegister()) {
        __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
3756
Calin Juravlebacfec32014-11-14 15:54:36 +00003757void LocationsBuilderX86_64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003758 DataType::Type type = rem->GetResultType();
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003759 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003760 new (GetGraph()->GetAllocator()) LocationSummary(rem, LocationSummary::kNoCall);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003761
3762 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003763 case DataType::Type::kInt32:
3764 case DataType::Type::kInt64: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003765 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003766 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003767 // Intel uses rdx:rax as the dividend and puts the remainder in rdx
3768 locations->SetOut(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003769 // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
3770 // which enforces results to be in RAX and RDX, things are simpler if we use EAX also as
3771 // output and request another temp.
3772 if (rem->InputAt(1)->IsConstant()) {
3773 locations->AddTemp(Location::RequiresRegister());
3774 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003775 break;
3776 }
3777
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003778 case DataType::Type::kFloat32:
3779 case DataType::Type::kFloat64: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003780 locations->SetInAt(0, Location::Any());
3781 locations->SetInAt(1, Location::Any());
3782 locations->SetOut(Location::RequiresFpuRegister());
3783 locations->AddTemp(Location::RegisterLocation(RAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003784 break;
3785 }
3786
3787 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003788 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003789 }
3790}
3791
3792void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003793 DataType::Type type = rem->GetResultType();
Calin Juravlebacfec32014-11-14 15:54:36 +00003794 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003795 case DataType::Type::kInt32:
3796 case DataType::Type::kInt64: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003797 GenerateDivRemIntegral(rem);
3798 break;
3799 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003800 case DataType::Type::kFloat32:
3801 case DataType::Type::kFloat64: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003802 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003803 break;
3804 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003805 default:
3806 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
3807 }
3808}
3809
void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  // The checked value can be consumed from a register, a stack slot, or a
  // constant, so no constraint beyond "anywhere" is needed.
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::Any());
}
3814
void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  // Tests the divisor against zero and branches to a throwing slow path when
  // it is zero. A constant divisor known to be non-zero emits no code at all.
  SlowPathCode* slow_path =
      new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathX86_64(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location value = locations->InAt(0);

  switch (instruction->GetType()) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      if (value.IsRegister()) {
        __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsStackSlot()) {
        __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
          // Statically known zero: unconditionally take the slow path.
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    case DataType::Type::kInt64: {
      if (value.IsRegister()) {
        __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsDoubleStackSlot()) {
        __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
          // Statically known zero: unconditionally take the slow path.
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
  }
}
3863
Calin Juravle9aec02f2014-11-18 23:06:35 +00003864void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
3865 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3866
3867 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003868 new (GetGraph()->GetAllocator()) LocationSummary(op, LocationSummary::kNoCall);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003869
3870 switch (op->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003871 case DataType::Type::kInt32:
3872 case DataType::Type::kInt64: {
Calin Juravle9aec02f2014-11-18 23:06:35 +00003873 locations->SetInAt(0, Location::RequiresRegister());
3874 // The shift count needs to be in CL.
3875 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
3876 locations->SetOut(Location::SameAsFirstInput());
3877 break;
3878 }
3879 default:
3880 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
3881 }
3882}
3883
void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
  // Shared code generation for Shl/Shr/UShr. The left operand is both source
  // and destination; the shift count is either in CL or an inline immediate
  // masked to the type's maximum shift distance.
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  switch (op->GetResultType()) {
    case DataType::Type::kInt32: {
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        // shll = left shift, sarl = arithmetic (signed) right, shrl = logical right.
        if (op->IsShl()) {
          __ shll(first_reg, second_reg);
        } else if (op->IsShr()) {
          __ sarl(first_reg, second_reg);
        } else {
          __ shrl(first_reg, second_reg);
        }
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
        if (op->IsShl()) {
          __ shll(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarl(first_reg, imm);
        } else {
          __ shrl(first_reg, imm);
        }
      }
      break;
    }
    case DataType::Type::kInt64: {
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        if (op->IsShl()) {
          __ shlq(first_reg, second_reg);
        } else if (op->IsShr()) {
          __ sarq(first_reg, second_reg);
        } else {
          __ shrq(first_reg, second_reg);
        }
      } else {
        // Note: the shift count of a long shift is still an int constant.
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
        if (op->IsShl()) {
          __ shlq(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarq(first_reg, imm);
        } else {
          __ shrq(first_reg, imm);
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
      UNREACHABLE();
  }
}
3941
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003942void LocationsBuilderX86_64::VisitRor(HRor* ror) {
3943 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003944 new (GetGraph()->GetAllocator()) LocationSummary(ror, LocationSummary::kNoCall);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003945
3946 switch (ror->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003947 case DataType::Type::kInt32:
3948 case DataType::Type::kInt64: {
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003949 locations->SetInAt(0, Location::RequiresRegister());
3950 // The shift count needs to be in CL (unless it is a constant).
3951 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
3952 locations->SetOut(Location::SameAsFirstInput());
3953 break;
3954 }
3955 default:
3956 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3957 UNREACHABLE();
3958 }
3959}
3960
3961void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
3962 LocationSummary* locations = ror->GetLocations();
3963 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
3964 Location second = locations->InAt(1);
3965
3966 switch (ror->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003967 case DataType::Type::kInt32:
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003968 if (second.IsRegister()) {
3969 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3970 __ rorl(first_reg, second_reg);
3971 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003972 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003973 __ rorl(first_reg, imm);
3974 }
3975 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003976 case DataType::Type::kInt64:
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003977 if (second.IsRegister()) {
3978 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3979 __ rorq(first_reg, second_reg);
3980 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003981 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003982 __ rorq(first_reg, imm);
3983 }
3984 break;
3985 default:
3986 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3987 UNREACHABLE();
3988 }
3989}
3990
// Shl, Shr, and UShr differ only in opcode selection, so all six visitors
// delegate to the common HandleShift helpers.
void LocationsBuilderX86_64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
4014
void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
  // Object allocation calls into the runtime, so the argument follows the
  // runtime calling convention and the result comes back in RAX.
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  if (instruction->IsStringAlloc()) {
    // String allocation goes through the NewEmptyString entrypoint (see the
    // code generator below) and only needs a temp for the entrypoint method.
    locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
  } else {
    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  }
  locations->SetOut(Location::RegisterLocation(RAX));
}
4026
// Code generation for object allocation: either a direct call to the
// NewEmptyString stub (for String) or a regular runtime allocation call.
void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    CpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<CpuRegister>();
    MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize);
    // Load the ArtMethod* for NewEmptyString from the thread (GS-relative), then
    // call its quick-compiled code; record the PC for stack-map purposes.
    __ gs()->movq(temp, Address::Absolute(QUICK_ENTRY_POINT(pNewEmptyString), /* no_rip */ true));
    __ call(Address(temp, code_offset.SizeValue()));
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
    DCHECK(!codegen_->IsLeafMethod());
  }
}
4043
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004044void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004045 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
4046 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004047 InvokeRuntimeCallingConvention calling_convention;
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004048 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00004049 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
4050 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004051}
4052
// Code generation for array allocation: pick the entrypoint matching the
// resolved element class and invoke it at runtime.
void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  QuickEntrypointEnum entrypoint =
      CodeGenerator::GetArrayAllocationEntrypoint(instruction->GetLoadClass()->GetClass());
  codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
  DCHECK(!codegen_->IsLeafMethod());
}
4062
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004063void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004064 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004065 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004066 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4067 if (location.IsStackSlot()) {
4068 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4069 } else if (location.IsDoubleStackSlot()) {
4070 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4071 }
4072 locations->SetOut(location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004073}
4074
void InstructionCodeGeneratorX86_64::VisitParameterValue(
    HParameterValue* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}
4079
// The current ArtMethod* is always available in the fixed method register.
void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}
4085
void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
    HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
4090
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004091void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
4092 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004093 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004094 locations->SetInAt(0, Location::RequiresRegister());
4095 locations->SetOut(Location::RequiresRegister());
4096}
4097
// Code generation for reading an ArtMethod* from a class table.
// Input 0 holds the mirror::Class; the output register receives the method
// pointer from either the embedded vtable or the IMT.
void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    // Vtable entries are embedded in the class object: one load from
    // class + entry offset.
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kX86_64PointerSize).SizeValue();
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
  } else {
    // IMT case: first load the ImTable pointer from the class, then load the
    // method pointer from the table. The output register is used as the
    // intermediate for the ImTable pointer.
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kX86_64PointerSize));
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(),
                    mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->Out().AsRegister<CpuRegister>(), method_offset));
  }
}
4115
// Register allocation for bitwise NOT: operates in place, so the output must
// share the input register.
void LocationsBuilderX86_64::VisitNot(HNot* not_) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(not_, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}
4122
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004123void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
4124 LocationSummary* locations = not_->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004125 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4126 locations->Out().AsRegister<CpuRegister>().AsRegister());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004127 Location out = locations->Out();
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004128 switch (not_->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004129 case DataType::Type::kInt32:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004130 __ notl(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004131 break;
4132
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004133 case DataType::Type::kInt64:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004134 __ notq(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004135 break;
4136
4137 default:
4138 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
4139 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004140}
4141
// Register allocation for boolean NOT: in-place, output shares the input register.
void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(bool_not, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}
4148
4149void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004150 LocationSummary* locations = bool_not->GetLocations();
4151 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4152 locations->Out().AsRegister<CpuRegister>().AsRegister());
4153 Location out = locations->Out();
4154 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
4155}
4156
// Register allocation for a phi: phis are resolved by the register allocator,
// so any location is acceptable for the inputs and the output.
void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}
4165
// Phis never reach code generation; they are eliminated beforehand.
void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented";
}
4169
// Emits (or elides) a memory barrier of the requested kind under the x86-64
// memory model.
void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
  /*
   * According to the JSR-133 Cookbook, for x86-64 only StoreLoad/AnyAny barriers need memory fence.
   * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
   * For those cases, all we need to ensure is that there is a scheduling barrier in place.
   */
  switch (kind) {
    case MemBarrierKind::kAnyAny: {
      // StoreLoad/AnyAny is the one ordering x86-64 does not give for free.
      MemoryFence();
      break;
    }
    case MemBarrierKind::kAnyStore:
    case MemBarrierKind::kLoadAny:
    case MemBarrierKind::kStoreStore: {
      // nop
      break;
    }
    case MemBarrierKind::kNTStoreStore:
      // Non-Temporal Store/Store needs an explicit fence.
      MemoryFence(/* non-temporal */ true);
      break;
  }
}
4193
// Register allocation shared by instance and static field loads.
void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  // Reference loads may need a read-barrier slow path.
  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction,
                                                       object_field_get_with_read_barrier
                                                           ? LocationSummary::kCallOnSlowPath
                                                           : LocationSummary::kNoCall);
  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  // Input 0: the object (or Class, for statics) holding the field.
  locations->SetInAt(0, Location::RequiresRegister());
  if (DataType::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    // The output overlaps for an object field get when read barriers
    // are enabled: we do not want the move to overwrite the object's
    // location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}
4219
// Code generation shared by instance and static field loads.
// Emits a type-appropriate load of *(base + offset) into the output location,
// plus implicit null-check recording, read barriers for references, and a
// LoadAny barrier when the field is volatile.
void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Location base_loc = locations->InAt(0);
  CpuRegister base = base_loc.AsRegister<CpuRegister>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
  DataType::Type load_type = instruction->GetType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (load_type) {
    // Sub-word loads pick zero- vs sign-extension based on the type.
    case DataType::Type::kBool:
    case DataType::Type::kUint8: {
      __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kInt8: {
      __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kUint16: {
      __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kInt16: {
      __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kInt32: {
      __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kReference: {
      // /* HeapReference<Object> */ out = *(base + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            instruction, out, base, offset, /* needs_null_check */ true);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
      }
      break;
    }

    case DataType::Type::kInt64: {
      __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kFloat32: {
      __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kFloat64: {
      __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << load_type;
      UNREACHABLE();
  }

  if (load_type == DataType::Type::kReference) {
    // Potential implicit null checks, in the case of reference
    // fields, are handled in the previous switch statement.
  } else {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (is_volatile) {
    if (load_type == DataType::Type::kReference) {
      // Memory barriers, in the case of references, are also handled
      // in the previous switch statement.
    } else {
      codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
    }
  }
}
4320
// Register allocation shared by instance and static field stores.
void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
                                            const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  DataType::Type field_type = field_info.GetFieldType();
  bool is_volatile = field_info.IsVolatile();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));

  // Input 0: the object (or Class, for statics) holding the field.
  locations->SetInAt(0, Location::RequiresRegister());
  // Input 1: the value to store; constants are allowed, but for volatile
  // fields only 32-bit-immediate constants, so the store stays a single
  // instruction.
  if (DataType::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
    if (is_volatile) {
      // In order to satisfy the semantics of volatile, this must be a single instruction store.
      locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
    } else {
      locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
    }
  } else {
    if (is_volatile) {
      // In order to satisfy the semantics of volatile, this must be a single instruction store.
      locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
    } else {
      locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
    }
  }
  if (needs_write_barrier) {
    // Temporary registers for the write barrier.
    locations->AddTemp(Location::RequiresRegister());  // Possibly used for reference poisoning too.
    locations->AddTemp(Location::RequiresRegister());
  } else if (kPoisonHeapReferences && field_type == DataType::Type::kReference) {
    // Temporary register for the reference poisoning.
    locations->AddTemp(Location::RequiresRegister());
  }
}
4357
// Code generation shared by instance and static field stores.
// Emits a type-appropriate store of input 1 into *(base + offset), framed by
// AnyStore/AnyAny barriers for volatile fields, with implicit null-check
// recording and a GC card mark when a non-null reference is stored.
void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
                                                    const FieldInfo& field_info,
                                                    bool value_can_be_null) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
  Location value = locations->InAt(1);
  bool is_volatile = field_info.IsVolatile();
  DataType::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  if (is_volatile) {
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  // Set when MoveInt64ToAddress below already recorded the null check, to
  // avoid recording against an instruction past the faulting one.
  bool maybe_record_implicit_null_check_done = false;

  switch (field_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8: {
      if (value.IsConstant()) {
        __ movb(Address(base, offset),
                Immediate(CodeGenerator::GetInt8ValueOf(value.GetConstant())));
      } else {
        __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case DataType::Type::kUint16:
    case DataType::Type::kInt16: {
      if (value.IsConstant()) {
        __ movw(Address(base, offset),
                Immediate(CodeGenerator::GetInt16ValueOf(value.GetConstant())));
      } else {
        __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case DataType::Type::kInt32:
    case DataType::Type::kReference: {
      if (value.IsConstant()) {
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        // `field_type == DataType::Type::kReference` implies `v == 0`.
        DCHECK((field_type != DataType::Type::kReference) || (v == 0));
        // Note: if heap poisoning is enabled, no need to poison
        // (negate) `v` if it is a reference, as it would be null.
        __ movl(Address(base, offset), Immediate(v));
      } else {
        if (kPoisonHeapReferences && field_type == DataType::Type::kReference) {
          // Poison the reference in a temp so the source register keeps its
          // unpoisoned value.
          CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
          __ movl(temp, value.AsRegister<CpuRegister>());
          __ PoisonHeapReference(temp);
          __ movl(Address(base, offset), temp);
        } else {
          __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
        }
      }
      break;
    }

    case DataType::Type::kInt64: {
      if (value.IsConstant()) {
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case DataType::Type::kFloat32: {
      if (value.IsConstant()) {
        // Store the float's bit pattern as a 32-bit immediate.
        int32_t v =
            bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(Address(base, offset), Immediate(v));
      } else {
        __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case DataType::Type::kFloat64: {
      if (value.IsConstant()) {
        // Store the double's bit pattern; may take two 32-bit stores.
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  if (!maybe_record_implicit_null_check_done) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
    CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
    codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
  }

  if (is_volatile) {
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
4481
// Instance field store: delegates to the shared field-set location builder.
void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
4485
// Instance field store: delegates to the shared field-set code generator.
void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
4489
// Instance field load: delegates to the shared field-get location builder.
void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction);
}
4493
// Instance field load: delegates to the shared field-get code generator.
void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004497
// Static field load: delegates to the shared field-get location builder.
void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction);
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004501
// Static field load: delegates to the shared field-get code generator.
void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004505
// Static field store: delegates to the shared field-set location builder.
void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004509
// Static field store: delegates to the shared field-set code generator.
void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
4513
// Unresolved instance field load: field access via runtime call, so locations
// follow the field-access calling convention.
void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
4520
// Unresolved instance field load: generated as a runtime call through the
// shared unresolved-field-access helper.
void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
4530
// Unresolved instance field store: locations follow the field-access calling
// convention for the runtime call.
void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
4537
// Unresolved instance field store: generated as a runtime call through the
// shared unresolved-field-access helper.
void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
4547
4548void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
4549 HUnresolvedStaticFieldGet* instruction) {
4550 FieldAccessCallingConventionX86_64 calling_convention;
4551 codegen_->CreateUnresolvedFieldLocationSummary(
4552 instruction, instruction->GetFieldType(), calling_convention);
4553}
4554
4555void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
4556 HUnresolvedStaticFieldGet* instruction) {
4557 FieldAccessCallingConventionX86_64 calling_convention;
4558 codegen_->GenerateUnresolvedFieldAccess(instruction,
4559 instruction->GetFieldType(),
4560 instruction->GetFieldIndex(),
4561 instruction->GetDexPc(),
4562 calling_convention);
4563}
4564
4565void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
4566 HUnresolvedStaticFieldSet* instruction) {
4567 FieldAccessCallingConventionX86_64 calling_convention;
4568 codegen_->CreateUnresolvedFieldLocationSummary(
4569 instruction, instruction->GetFieldType(), calling_convention);
4570}
4571
4572void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
4573 HUnresolvedStaticFieldSet* instruction) {
4574 FieldAccessCallingConventionX86_64 calling_convention;
4575 codegen_->GenerateUnresolvedFieldAccess(instruction,
4576 instruction->GetFieldType(),
4577 instruction->GetFieldIndex(),
4578 instruction->GetDexPc(),
4579 calling_convention);
4580}
4581
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004582void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004583 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
4584 Location loc = codegen_->GetCompilerOptions().GetImplicitNullChecks()
4585 ? Location::RequiresRegister()
4586 : Location::Any();
4587 locations->SetInAt(0, loc);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004588}
4589
Calin Juravle2ae48182016-03-16 14:05:09 +00004590void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4591 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004592 return;
4593 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004594 LocationSummary* locations = instruction->GetLocations();
4595 Location obj = locations->InAt(0);
4596
4597 __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
Calin Juravle2ae48182016-03-16 14:05:09 +00004598 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004599}
4600
Calin Juravle2ae48182016-03-16 14:05:09 +00004601void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01004602 SlowPathCode* slow_path = new (GetScopedAllocator()) NullCheckSlowPathX86_64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00004603 AddSlowPath(slow_path);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004604
4605 LocationSummary* locations = instruction->GetLocations();
4606 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004607
4608 if (obj.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00004609 __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004610 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004611 __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004612 } else {
4613 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00004614 DCHECK(obj.GetConstant()->IsNullConstant());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004615 __ jmp(slow_path->GetEntryLabel());
4616 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004617 }
4618 __ j(kEqual, slow_path->GetEntryLabel());
4619}
4620
void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
  // Delegate to the code generator, which chooses between the implicit
  // (memory-touch) and explicit (compare-and-branch) strategies defined
  // above — presumably driven by GetImplicitNullChecks(); see the
  // corresponding locations builder.
  codegen_->GenerateNullCheck(instruction);
}
4624
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004625void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004626 bool object_array_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004627 kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004628 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004629 new (GetGraph()->GetAllocator()) LocationSummary(instruction,
4630 object_array_get_with_read_barrier
4631 ? LocationSummary::kCallOnSlowPath
4632 : LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01004633 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004634 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004635 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004636 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04004637 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004638 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004639 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4640 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004641 // The output overlaps for an object array get when read barriers
4642 // are enabled: we do not want the move to overwrite the array's
4643 // location, as we need it to emit the read barrier.
4644 locations->SetOut(
4645 Location::RequiresRegister(),
4646 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004647 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004648}
4649
// Generates code for an array element load. The element address is formed by
// ArrayAddress() from the array register, the (register or constant) index,
// the per-type scale and the array data offset; the move instruction is
// selected by element type. Reference loads may additionally emit read
// barrier code.
void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location index = locations->InAt(1);
  Location out_loc = locations->Out();
  uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);

  DataType::Type type = instruction->GetType();
  switch (type) {
    // Unsigned byte-sized elements: zero-extend into the output register.
    case DataType::Type::kBool:
    case DataType::Type::kUint8: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
      break;
    }

    // Signed byte: sign-extend.
    case DataType::Type::kInt8: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movsxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
      break;
    }

    case DataType::Type::kUint16: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
        // String.charAt() on a possibly-compressed string: test the
        // compression bit in the count field and load either a byte
        // (compressed) or a 16-bit char (uncompressed).
        // Branch cases into compressed and uncompressed for each index's type.
        uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
        NearLabel done, not_compressed;
        __ testb(Address(obj, count_offset), Immediate(1));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                      "Expecting 0=compressed, 1=uncompressed");
        __ j(kNotZero, &not_compressed);
        __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
        __ jmp(&done);
        __ Bind(&not_compressed);
        __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
        __ Bind(&done);
      } else {
        __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
      }
      break;
    }

    case DataType::Type::kInt16: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movsxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
      break;
    }

    case DataType::Type::kInt32: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
      break;
    }

    case DataType::Type::kReference: {
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      // /* HeapReference<Object> */ out =
      //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
        codegen_->GenerateArrayLoadWithBakerReadBarrier(
            instruction, out_loc, obj, data_offset, index, /* needs_null_check */ true);
      } else {
        CpuRegister out = out_loc.AsRegister<CpuRegister>();
        __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        if (index.IsConstant()) {
          // Fold the constant index into the offset for the slow path.
          uint32_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
        } else {
          codegen_->MaybeGenerateReadBarrierSlow(
              instruction, out_loc, out_loc, obj_loc, data_offset, index);
        }
      }
      break;
    }

    case DataType::Type::kInt64: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movq(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
      break;
    }

    case DataType::Type::kFloat32: {
      XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
      __ movss(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
      break;
    }

    case DataType::Type::kFloat64: {
      XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
      __ movsd(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
      break;
    }

    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  if (type == DataType::Type::kReference) {
    // Potential implicit null checks, in the case of reference
    // arrays, are handled in the previous switch statement.
  } else {
    // For all other types the first load above doubles as the implicit
    // null check.
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
}
4767
4768void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004769 DataType::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004770
4771 bool needs_write_barrier =
4772 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004773 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004774
Vladimir Markoca6fff82017-10-03 14:49:14 +01004775 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004776 instruction,
Vladimir Marko8d49fd72016-08-25 15:20:47 +01004777 may_need_runtime_call_for_type_check ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00004778 LocationSummary::kCallOnSlowPath :
4779 LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004780
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004781 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04004782 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004783 if (DataType::IsFloatingPointType(value_type)) {
Mark Mendellea5af682015-10-22 17:35:49 -04004784 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004785 } else {
4786 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
4787 }
4788
4789 if (needs_write_barrier) {
4790 // Temporary registers for the write barrier.
Roland Levillain16d9f942016-08-25 17:27:56 +01004791 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004792 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004793 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004794}
4795
// Generates code for an array element store. Primitive types compile to a
// single store (register or immediate). Reference stores are the complex
// case: they may need a runtime type check (via ArraySetSlowPathX86_64),
// heap-reference poisoning, and a GC card mark (MarkGCCard).
void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location array_loc = locations->InAt(0);
  CpuRegister array = array_loc.AsRegister<CpuRegister>();
  Location index = locations->InAt(1);
  Location value = locations->InAt(2);
  DataType::Type value_type = instruction->GetComponentType();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  // Offsets used by the reference-store type check below.
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();

  switch (value_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_1, offset);
      if (value.IsRegister()) {
        __ movb(address, value.AsRegister<CpuRegister>());
      } else {
        __ movb(address, Immediate(CodeGenerator::GetInt8ValueOf(value.GetConstant())));
      }
      // The store itself serves as the implicit null check.
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kUint16:
    case DataType::Type::kInt16: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_2, offset);
      if (value.IsRegister()) {
        __ movw(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        __ movw(address, Immediate(CodeGenerator::GetInt16ValueOf(value.GetConstant())));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kReference: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);

      if (!value.IsRegister()) {
        // Just setting null: no type check and no write barrier needed.
        DCHECK(instruction->InputAt(2)->IsNullConstant());
        DCHECK(value.IsConstant()) << value;
        __ movl(address, Immediate(0));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        DCHECK(!needs_write_barrier);
        DCHECK(!may_need_runtime_call_for_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      CpuRegister register_value = value.AsRegister<CpuRegister>();
      // We cannot use a NearLabel for `done`, as its range may be too
      // short when Baker read barriers are enabled.
      Label done;
      NearLabel not_null, do_put;
      SlowPathCode* slow_path = nullptr;
      Location temp_loc = locations->GetTemp(0);
      CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
      if (may_need_runtime_call_for_type_check) {
        slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathX86_64(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          // Storing null never fails the type check; store and skip it.
          __ testl(register_value, register_value);
          __ j(kNotEqual, &not_null);
          __ movl(address, Immediate(0));
          codegen_->MaybeRecordImplicitNullCheck(instruction);
          __ jmp(&done);
          __ Bind(&not_null);
        }

        // Note that when Baker read barriers are enabled, the type
        // checks are performed without read barriers.  This is fine,
        // even in the case where a class object is in the from-space
        // after the flip, as a comparison involving such a type would
        // not produce a false positive; it may of course produce a
        // false negative, in which case we would take the ArraySet
        // slow path.

        // /* HeapReference<Class> */ temp = array->klass_
        __ movl(temp, Address(array, class_offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ MaybeUnpoisonHeapReference(temp);

        // /* HeapReference<Class> */ temp = temp->component_type_
        __ movl(temp, Address(temp, component_offset));
        // If heap poisoning is enabled, no need to unpoison `temp`
        // nor the object reference in `register_value->klass`, as
        // we are comparing two poisoned references.
        __ cmpl(temp, Address(register_value, class_offset));

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          __ j(kEqual, &do_put);
          // If heap poisoning is enabled, the `temp` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp);

          // If heap poisoning is enabled, no need to unpoison the
          // heap reference loaded below, as it is only used for a
          // comparison with null.
          __ cmpl(Address(temp, super_offset), Immediate(0));
          __ j(kNotEqual, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ j(kNotEqual, slow_path->GetEntryLabel());
        }
      }

      // Perform the actual reference store, poisoning the value first when
      // heap poisoning is enabled.
      if (kPoisonHeapReferences) {
        __ movl(temp, register_value);
        __ PoisonHeapReference(temp);
        __ movl(address, temp);
      } else {
        __ movl(address, register_value);
      }
      if (!may_need_runtime_call_for_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      // Mark the GC card for the write barrier.
      CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
      codegen_->MarkGCCard(
          temp, card, array, value.AsRegister<CpuRegister>(), instruction->GetValueCanBeNull());
      __ Bind(&done);

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }

      break;
    }

    case DataType::Type::kInt32: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsRegister()) {
        __ movl(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kInt64: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsRegister()) {
        __ movq(address, value.AsRegister<CpuRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        // A 64-bit immediate may not fit in one instruction; MoveInt64ToAddress
        // handles splitting it into two 32-bit stores when needed.
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case DataType::Type::kFloat32: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsFpuRegister()) {
        __ movss(address, value.AsFpuRegister<XmmRegister>());
      } else {
        DCHECK(value.IsConstant());
        // Store the float constant via its bit pattern.
        int32_t v = bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kFloat64: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsFpuRegister()) {
        __ movsd(address, value.AsFpuRegister<XmmRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        // Store the double constant via its 64-bit bit pattern.
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
4999
5000void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005001 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005002 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005003 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04005004 if (!instruction->IsEmittedAtUseSite()) {
5005 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5006 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005007}
5008
5009void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
Mark Mendellee8d9712016-07-12 11:13:15 -04005010 if (instruction->IsEmittedAtUseSite()) {
5011 return;
5012 }
5013
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005014 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01005015 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00005016 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
5017 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005018 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005019 codegen_->MaybeRecordImplicitNullCheck(instruction);
jessicahandojo4877b792016-09-08 19:49:13 -07005020 // Mask out most significant bit in case the array is String's array of char.
5021 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01005022 __ shrl(out, Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07005023 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005024}
5025
5026void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005027 RegisterSet caller_saves = RegisterSet::Empty();
5028 InvokeRuntimeCallingConvention calling_convention;
5029 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5030 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
5031 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Mark Mendellf60c90b2015-03-04 15:12:59 -05005032 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendellee8d9712016-07-12 11:13:15 -04005033 HInstruction* length = instruction->InputAt(1);
5034 if (!length->IsEmittedAtUseSite()) {
5035 locations->SetInAt(1, Location::RegisterOrConstant(length));
5036 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005037}
5038
// Emits the array bounds check: jumps to a throwing slow path when
// index >= length (unsigned compare also catches negative indices).
void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location index_loc = locations->InAt(0);
  Location length_loc = locations->InAt(1);
  SlowPathCode* slow_path =
      new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathX86_64(instruction);

  if (length_loc.IsConstant()) {
    int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
    if (index_loc.IsConstant()) {
      // BCE will remove the bounds check if we are guaranteed to pass.
      int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
      if (index < 0 || index >= length) {
        // Statically known failure: unconditionally enter the slow path.
        codegen_->AddSlowPath(slow_path);
        __ jmp(slow_path->GetEntryLabel());
      } else {
        // Some optimization after BCE may have generated this, and we should not
        // generate a bounds check if it is a valid range.
      }
      return;
    }

    // We have to reverse the jump condition because the length is the constant.
    // cmp index, length; jump if index >= length (unsigned).
    CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
    __ cmpl(index_reg, Immediate(length));
    codegen_->AddSlowPath(slow_path);
    __ j(kAboveEqual, slow_path->GetEntryLabel());
  } else {
    HInstruction* array_length = instruction->InputAt(1);
    if (array_length->IsEmittedAtUseSite()) {
      // Address the length field in the array.
      DCHECK(array_length->IsArrayLength());
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
        // TODO: if index_loc.IsConstant(), compare twice the index (to compensate for
        // the string compression flag) with the in-memory length and avoid the temporary.
        // The in-memory length of a compressed string stores the compression flag
        // in its least significant bit, so shift it out before comparing.
        CpuRegister length_reg = CpuRegister(TMP);
        __ movl(length_reg, array_len);
        codegen_->MaybeRecordImplicitNullCheck(array_length);
        __ shrl(length_reg, Immediate(1));
        codegen_->GenerateIntCompare(length_reg, index_loc);
      } else {
        // Checking the bound for general case:
        // Array of char or String's array when the compression feature off.
        if (index_loc.IsConstant()) {
          int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
          __ cmpl(array_len, Immediate(value));
        } else {
          __ cmpl(array_len, index_loc.AsRegister<CpuRegister>());
        }
        codegen_->MaybeRecordImplicitNullCheck(array_length);
      }
    } else {
      codegen_->GenerateIntCompare(length_loc, index_loc);
    }
    codegen_->AddSlowPath(slow_path);
    // Here the compare was (length, index): length <= index (unsigned) is out of bounds.
    __ j(kBelowEqual, slow_path->GetEntryLabel());
  }
}
5100
// Marks the GC card table entry for `object` after a reference `value` was
// stored into it, so the concurrent/generational GC re-scans that card.
// `temp` and `card` are scratch registers clobbered by this sequence.
// When `value_can_be_null` is true, a null store skips the marking entirely.
void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
                                     CpuRegister card,
                                     CpuRegister object,
                                     CpuRegister value,
                                     bool value_can_be_null) {
  NearLabel is_null;
  if (value_can_be_null) {
    __ testl(value, value);
    __ j(kEqual, &is_null);
  }
  // Load the card table base from the Thread object (thread-local via %gs).
  __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64PointerSize>().Int32Value(),
                                        /* no_rip */ true));
  // card_index = object_address >> kCardShift; dirty the byte at base + index.
  __ movq(temp, object);
  __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
  // Storing the low byte of `card` (the table base) is the dirty marker;
  // presumably the base is chosen so its LSB equals the dirty-card value.
  __ movb(Address(temp, card, TIMES_1, 0), card);
  if (value_can_be_null) {
    __ Bind(&is_null);
  }
}
5120
// Parallel moves never reach the locations builder; hitting this is a compiler bug.
void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented";
}
5124
// Emits a resolved parallel move via the move resolver. If the move directly
// precedes a loop's suspend check, first prune loop-phi spill slots from the
// suspend check's stack map, since this move is what populates them.
void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
  if (instruction->GetNext()->IsSuspendCheck() &&
      instruction->GetBlock()->GetLoopInformation() != nullptr) {
    HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
    // The back edge will generate the suspend check.
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
  }

  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
5135
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005136void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005137 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5138 instruction, LocationSummary::kCallOnSlowPath);
Aart Bikb13c65b2017-03-21 20:14:07 -07005139 // In suspend check slow path, usually there are no caller-save registers at all.
5140 // If SIMD instructions are present, however, we force spilling all live SIMD
5141 // registers in full width (since the runtime only saves/restores lower part).
Aart Bik5576f372017-03-23 16:17:37 -07005142 locations->SetCustomSlowPathCallerSaves(
5143 GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005144}
5145
5146void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005147 HBasicBlock* block = instruction->GetBlock();
5148 if (block->GetLoopInformation() != nullptr) {
5149 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5150 // The back edge will generate the suspend check.
5151 return;
5152 }
5153 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5154 // The goto will generate the suspend check.
5155 return;
5156 }
5157 GenerateSuspendCheck(instruction, nullptr);
5158}
5159
// Emits the suspend check: tests the thread-flags word in the Thread object
// (thread-local via %gs) and enters the slow path when any flag is set.
// With a non-null `successor` (a loop header), the fast path jumps straight to
// it; otherwise execution falls through after the slow path's return label.
void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  // Reuse a previously created slow path for this instruction if one exists.
  SuspendCheckSlowPathX86_64* slow_path =
      down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    slow_path =
        new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathX86_64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
    }
  } else {
    // A cached slow path must have been created for the same successor.
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64PointerSize>().Int32Value(),
                                  /* no_rip */ true),
                Immediate(0));
  if (successor == nullptr) {
    __ j(kNotEqual, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ j(kEqual, codegen_->GetLabelOf(successor));
    __ jmp(slow_path->GetEntryLabel());
  }
}
5187
// Returns the code generator's assembler, used by the `__` emission macro.
X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
  return codegen_->GetAssembler();
}
5191
// Emits native code for move number `index` of the resolved parallel move.
// Dispatches on the (source, destination) location pair: 32-bit stack slots use
// movl/movss, 64-bit slots movq/movsd, 128-bit SIMD slots movups, and memory-to-
// memory copies go through the reserved scratch register CpuRegister(TMP).
void ParallelMoveResolverX86_64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    if (destination.IsRegister()) {
      __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
    } else if (destination.IsStackSlot()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else {
      DCHECK(destination.IsDoubleStackSlot());
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    }
  } else if (source.IsStackSlot()) {
    if (destination.IsRegister()) {
      __ movl(destination.AsRegister<CpuRegister>(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (destination.IsFpuRegister()) {
      __ movss(destination.AsFpuRegister<XmmRegister>(),
               Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(destination.IsStackSlot());
      // Memory-to-memory: stage through TMP.
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (source.IsDoubleStackSlot()) {
    if (destination.IsRegister()) {
      __ movq(destination.AsRegister<CpuRegister>(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (destination.IsFpuRegister()) {
      __ movsd(destination.AsFpuRegister<XmmRegister>(),
               Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (source.IsSIMDStackSlot()) {
    if (destination.IsFpuRegister()) {
      __ movups(destination.AsFpuRegister<XmmRegister>(),
                Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(destination.IsSIMDStackSlot());
      // Copy the 128-bit slot as two 64-bit words through TMP.
      size_t high = kX86_64WordSize;
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex() + high));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex() + high), CpuRegister(TMP));
    }
  } else if (source.IsConstant()) {
    HConstant* constant = source.GetConstant();
    if (constant->IsIntConstant() || constant->IsNullConstant()) {
      int32_t value = CodeGenerator::GetInt32ValueOf(constant);
      if (destination.IsRegister()) {
        if (value == 0) {
          // xor is shorter than mov imm for zeroing a register.
          __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
        } else {
          __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
        }
      } else {
        DCHECK(destination.IsStackSlot()) << destination;
        __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
      }
    } else if (constant->IsLongConstant()) {
      int64_t value = constant->AsLongConstant()->GetValue();
      if (destination.IsRegister()) {
        codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        codegen_->Store64BitValueToStack(destination, value);
      }
    } else if (constant->IsFloatConstant()) {
      float fp_value = constant->AsFloatConstant()->GetValue();
      if (destination.IsFpuRegister()) {
        XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
        codegen_->Load32BitValue(dest, fp_value);
      } else {
        DCHECK(destination.IsStackSlot()) << destination;
        // Store the float's bit pattern as a 32-bit immediate.
        Immediate imm(bit_cast<int32_t, float>(fp_value));
        __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
      }
    } else {
      DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
      double fp_value = constant->AsDoubleConstant()->GetValue();
      int64_t value = bit_cast<int64_t, double>(fp_value);
      if (destination.IsFpuRegister()) {
        XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
        codegen_->Load64BitValue(dest, fp_value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        codegen_->Store64BitValueToStack(destination, value);
      }
    }
  } else if (source.IsFpuRegister()) {
    if (destination.IsFpuRegister()) {
      __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
    } else if (destination.IsStackSlot()) {
      __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else if (destination.IsDoubleStackSlot()) {
      __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else {
      DCHECK(destination.IsSIMDStackSlot());
      __ movups(Address(CpuRegister(RSP), destination.GetStackIndex()),
                source.AsFpuRegister<XmmRegister>());
    }
  }
}
5304
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005305void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005306 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005307 __ movl(Address(CpuRegister(RSP), mem), reg);
5308 __ movl(reg, CpuRegister(TMP));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005309}
5310
// Swaps two 64-bit core registers through the reserved TMP register.
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
  __ movq(CpuRegister(TMP), reg1);
  __ movq(reg1, reg2);
  __ movq(reg2, CpuRegister(TMP));
}
5316
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005317void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
5318 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5319 __ movq(Address(CpuRegister(RSP), mem), reg);
5320 __ movq(reg, CpuRegister(TMP));
5321}
5322
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005323void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
5324 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5325 __ movss(Address(CpuRegister(RSP), mem), reg);
5326 __ movd(reg, CpuRegister(TMP));
5327}
5328
5329void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
5330 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5331 __ movsd(Address(CpuRegister(RSP), mem), reg);
5332 __ movd(reg, CpuRegister(TMP));
5333}
5334
// Swaps a 128-bit SIMD register with a SIMD stack slot. Spills the register
// into a freshly reserved scratch area below RSP, swaps that area with the slot
// (whose offset must be rebased by the RSP adjustment), then reloads.
void ParallelMoveResolverX86_64::Exchange128(XmmRegister reg, int mem) {
  size_t extra_slot = 2 * kX86_64WordSize;
  __ subq(CpuRegister(RSP), Immediate(extra_slot));
  __ movups(Address(CpuRegister(RSP), 0), XmmRegister(reg));
  ExchangeMemory64(0, mem + extra_slot, 2);
  __ movups(XmmRegister(reg), Address(CpuRegister(RSP), 0));
  __ addq(CpuRegister(RSP), Immediate(extra_slot));
}
5343
// Swaps two 32-bit stack slots using TMP plus one allocated scratch register.
// If the scratch register had to be spilled (pushed), both slot offsets move
// down by one word, hence the stack_offset adjustment.
void ParallelMoveResolverX86_64::ExchangeMemory32(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
  __ movl(CpuRegister(ensure_scratch.GetRegister()),
          Address(CpuRegister(RSP), mem2 + stack_offset));
  __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
  __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
          CpuRegister(ensure_scratch.GetRegister()));
}
5356
// Swaps `num_of_qwords` 64-bit words between two stack regions, one qword at a
// time, using TMP plus one allocated scratch register. A spilled scratch
// register shifts both offsets by one word (see stack_offset).
void ParallelMoveResolverX86_64::ExchangeMemory64(int mem1, int mem2, int num_of_qwords) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;

  // Now that temp registers are available (possibly spilled), exchange blocks of memory.
  for (int i = 0; i < num_of_qwords; i++) {
    __ movq(CpuRegister(TMP),
            Address(CpuRegister(RSP), mem1 + stack_offset));
    __ movq(CpuRegister(ensure_scratch.GetRegister()),
            Address(CpuRegister(RSP), mem2 + stack_offset));
    __ movq(Address(CpuRegister(RSP), mem2 + stack_offset),
            CpuRegister(TMP));
    __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
            CpuRegister(ensure_scratch.GetRegister()));
    stack_offset += kX86_64WordSize;
  }
}
5376
// Emits native code for swap number `index` of the resolved parallel move,
// dispatching on the (source, destination) location pair to the matching
// Exchange*/ExchangeMemory* helper.
void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    ExchangeMemory32(destination.GetStackIndex(), source.GetStackIndex());
  } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
    Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
    ExchangeMemory64(destination.GetStackIndex(), source.GetStackIndex(), 1);
  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
    // XMM <-> XMM swap staged through the core TMP register.
    __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
    __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
    __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
  } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
    Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
    Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else if (source.IsSIMDStackSlot() && destination.IsSIMDStackSlot()) {
    ExchangeMemory64(destination.GetStackIndex(), source.GetStackIndex(), 2);
  } else if (source.IsFpuRegister() && destination.IsSIMDStackSlot()) {
    Exchange128(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (destination.IsFpuRegister() && source.IsSIMDStackSlot()) {
    Exchange128(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else {
    LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
  }
}
5418
5419
// Spills a scratch core register on the native stack (paired with RestoreScratch).
void ParallelMoveResolverX86_64::SpillScratch(int reg) {
  __ pushq(CpuRegister(reg));
}
5423
5424
// Reloads a scratch core register previously saved by SpillScratch.
void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
  __ popq(CpuRegister(reg));
}
5428
// Emits a class initialization check: enters `slow_path` unless the class in
// `class_reg` has status >= kInitialized. Only the byte that holds the status
// bits (which sit above the SubtypeCheckBits in the 32-bit status field) is
// compared, so a single cmpb suffices.
void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
    SlowPathCode* slow_path, CpuRegister class_reg) {
  constexpr size_t status_lsb_position = SubtypeCheckBits::BitStructSizeOf();
  const size_t status_byte_offset =
      mirror::Class::StatusOffset().SizeValue() + (status_lsb_position / kBitsPerByte);
  constexpr uint32_t shifted_initialized_value =
      enum_cast<uint32_t>(ClassStatus::kInitialized) << (status_lsb_position % kBitsPerByte);

  // Status below kInitialized means not yet initialized: take the slow path.
  __ cmpb(Address(class_reg, status_byte_offset), Immediate(shifted_initialized_value));
  __ j(kBelow, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
  // No need for memory fence, thanks to the x86-64 memory model.
}
5442
// Returns the load kind actually used for an HLoadClass; on x86-64 every
// desired kind is supported unchanged. The DCHECKs validate that PC-relative
// kinds are AOT-only and the JIT table kind is JIT-only.
HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
    HLoadClass::LoadKind desired_class_load_kind) {
  switch (desired_class_load_kind) {
    case HLoadClass::LoadKind::kInvalid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
    case HLoadClass::LoadKind::kReferrersClass:
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadClass::LoadKind::kBootImageClassTable:
    case HLoadClass::LoadKind::kBssEntry:
      // PC-relative kinds are only emitted by the AOT compiler.
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadClass::LoadKind::kJitTableAddress:
      DCHECK(Runtime::Current()->UseJitCompilation());
      break;
    case HLoadClass::LoadKind::kBootImageAddress:
    case HLoadClass::LoadKind::kRuntimeCall:
      break;
  }
  return desired_class_load_kind;
}
5465
// Builds the location summary for an HLoadClass, depending on the load kind,
// whether a read barrier is needed, and whether a slow-path call may occur.
void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    // Custom calling convention: RAX serves as both input and output.
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
        cls,
        Location::RegisterLocation(RAX),
        Location::RegisterLocation(RAX));
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  // Boot-image classes never need a read barrier.
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }

  if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
    // Loads the class from the current method, so the method is an input.
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // Rely on the type resolution and/or initialization to save everything.
      // Custom calling convention: RAX serves as both input and output.
      RegisterSet caller_saves = RegisterSet::Empty();
      caller_saves.Add(Location::RegisterLocation(RAX));
      locations->SetCustomSlowPathCallerSaves(caller_saves);
    } else {
      // For non-Baker read barrier we have a temp-clobbering call.
    }
  }
}
5503
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005504Label* CodeGeneratorX86_64::NewJitRootClassPatch(const DexFile& dex_file,
Vladimir Marko174b2e22017-10-12 13:34:49 +01005505 dex::TypeIndex type_index,
Nicolas Geoffray5247c082017-01-13 14:17:29 +00005506 Handle<mirror::Class> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01005507 ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005508 // Add a patch entry and return the label.
Vladimir Marko174b2e22017-10-12 13:34:49 +01005509 jit_class_patches_.emplace_back(dex_file, type_index.index_);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005510 PatchInfo<Label>* info = &jit_class_patches_.back();
5511 return &info->label;
5512}
5513
Nicolas Geoffray5247c082017-01-13 14:17:29 +00005514// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
5515// move.
5516void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
Vladimir Marko41559982017-01-06 14:04:23 +00005517 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005518 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Marko41559982017-01-06 14:04:23 +00005519 codegen_->GenerateLoadClassRuntimeCall(cls);
Calin Juravle580b6092015-10-06 17:35:58 +01005520 return;
5521 }
Vladimir Marko41559982017-01-06 14:04:23 +00005522 DCHECK(!cls->NeedsAccessCheck());
Calin Juravle580b6092015-10-06 17:35:58 +01005523
Vladimir Marko41559982017-01-06 14:04:23 +00005524 LocationSummary* locations = cls->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005525 Location out_loc = locations->Out();
5526 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005527
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005528 const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
5529 ? kWithoutReadBarrier
5530 : kCompilerReadBarrierOption;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005531 bool generate_null_check = false;
Vladimir Marko41559982017-01-06 14:04:23 +00005532 switch (load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005533 case HLoadClass::LoadKind::kReferrersClass: {
5534 DCHECK(!cls->CanCallRuntime());
5535 DCHECK(!cls->MustGenerateClinitCheck());
5536 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5537 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
5538 GenerateGcRootFieldLoad(
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005539 cls,
5540 out_loc,
5541 Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()),
Roland Levillain00468f32016-10-27 18:02:48 +01005542 /* fixup_label */ nullptr,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005543 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005544 break;
5545 }
5546 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005547 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005548 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005549 __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
Vladimir Marko1998cd02017-01-13 13:02:58 +00005550 codegen_->RecordBootTypePatch(cls);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005551 break;
5552 case HLoadClass::LoadKind::kBootImageAddress: {
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005553 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Nicolas Geoffray5247c082017-01-13 14:17:29 +00005554 uint32_t address = dchecked_integral_cast<uint32_t>(
5555 reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
5556 DCHECK_NE(address, 0u);
Colin Cross0bd97172017-03-15 16:33:27 -07005557 __ movl(out, Immediate(static_cast<int32_t>(address))); // Zero-extended.
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005558 break;
5559 }
Vladimir Marko94ec2db2017-09-06 17:21:03 +01005560 case HLoadClass::LoadKind::kBootImageClassTable: {
5561 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
5562 __ movl(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
5563 codegen_->RecordBootTypePatch(cls);
5564 // Extract the reference from the slot data, i.e. clear the hash bits.
5565 int32_t masked_hash = ClassTable::TableSlot::MaskHash(
5566 ComputeModifiedUtf8Hash(cls->GetDexFile().StringByTypeIdx(cls->GetTypeIndex())));
5567 if (masked_hash != 0) {
5568 __ subl(out, Immediate(masked_hash));
5569 }
5570 break;
5571 }
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005572 case HLoadClass::LoadKind::kBssEntry: {
5573 Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
5574 /* no_rip */ false);
5575 Label* fixup_label = codegen_->NewTypeBssEntryPatch(cls);
5576 // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
5577 GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
5578 generate_null_check = true;
5579 break;
5580 }
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005581 case HLoadClass::LoadKind::kJitTableAddress: {
5582 Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
5583 /* no_rip */ true);
5584 Label* fixup_label =
Nicolas Geoffray5247c082017-01-13 14:17:29 +00005585 codegen_->NewJitRootClassPatch(cls->GetDexFile(), cls->GetTypeIndex(), cls->GetClass());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005586 // /* GcRoot<mirror::Class> */ out = *address
Vladimir Markoea4c1262017-02-06 19:59:33 +00005587 GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005588 break;
5589 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005590 default:
5591 LOG(FATAL) << "Unexpected load kind: " << cls->GetLoadKind();
5592 UNREACHABLE();
5593 }
5594
5595 if (generate_null_check || cls->MustGenerateClinitCheck()) {
5596 DCHECK(cls->CanCallRuntime());
Vladimir Marko174b2e22017-10-12 13:34:49 +01005597 SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) LoadClassSlowPathX86_64(
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005598 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
5599 codegen_->AddSlowPath(slow_path);
5600 if (generate_null_check) {
5601 __ testl(out, out);
5602 __ j(kEqual, slow_path->GetEntryLabel());
5603 }
5604 if (cls->MustGenerateClinitCheck()) {
5605 GenerateClassInitializationCheck(slow_path, out);
5606 } else {
5607 __ Bind(slow_path->GetExitLabel());
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005608 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005609 }
5610}
5611
5612void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
5613 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005614 new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005615 locations->SetInAt(0, Location::RequiresRegister());
5616 if (check->HasUses()) {
5617 locations->SetOut(Location::SameAsFirstInput());
5618 }
5619}
5620
5621void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005622 // We assume the class to not be null.
Vladimir Marko174b2e22017-10-12 13:34:49 +01005623 SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) LoadClassSlowPathX86_64(
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005624 check->GetLoadClass(), check, check->GetDexPc(), true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005625 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00005626 GenerateClassInitializationCheck(slow_path,
5627 check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005628}
5629
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005630HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
5631 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005632 switch (desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005633 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005634 case HLoadString::LoadKind::kBootImageInternTable:
Vladimir Markoaad75c62016-10-03 08:46:48 +00005635 case HLoadString::LoadKind::kBssEntry:
Calin Juravleffc87072016-04-20 14:22:09 +01005636 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005637 break;
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005638 case HLoadString::LoadKind::kJitTableAddress:
5639 DCHECK(Runtime::Current()->UseJitCompilation());
5640 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01005641 case HLoadString::LoadKind::kBootImageAddress:
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005642 case HLoadString::LoadKind::kRuntimeCall:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005643 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005644 }
5645 return desired_string_load_kind;
5646}
5647
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005648void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005649 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Vladimir Markoca6fff82017-10-03 14:49:14 +01005650 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005651 if (load->GetLoadKind() == HLoadString::LoadKind::kRuntimeCall) {
Christina Wadsworthabb341b2016-08-31 16:29:44 -07005652 locations->SetOut(Location::RegisterLocation(RAX));
5653 } else {
5654 locations->SetOut(Location::RequiresRegister());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005655 if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
5656 if (!kUseReadBarrier || kUseBakerReadBarrier) {
Vladimir Markoea4c1262017-02-06 19:59:33 +00005657 // Rely on the pResolveString to save everything.
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005658 // Custom calling convention: RAX serves as both input and output.
5659 RegisterSet caller_saves = RegisterSet::Empty();
5660 caller_saves.Add(Location::RegisterLocation(RAX));
5661 locations->SetCustomSlowPathCallerSaves(caller_saves);
5662 } else {
5663 // For non-Baker read barrier we have a temp-clobbering call.
5664 }
5665 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005666 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005667}
5668
Andreas Gampe8a0128a2016-11-28 07:38:35 -08005669Label* CodeGeneratorX86_64::NewJitRootStringPatch(const DexFile& dex_file,
Vladimir Marko174b2e22017-10-12 13:34:49 +01005670 dex::StringIndex string_index,
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00005671 Handle<mirror::String> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01005672 ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005673 // Add a patch entry and return the label.
Vladimir Marko174b2e22017-10-12 13:34:49 +01005674 jit_string_patches_.emplace_back(dex_file, string_index.index_);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005675 PatchInfo<Label>* info = &jit_string_patches_.back();
5676 return &info->label;
5677}
5678
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00005679// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
5680// move.
5681void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01005682 LocationSummary* locations = load->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005683 Location out_loc = locations->Out();
5684 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005685
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005686 switch (load->GetLoadKind()) {
5687 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005688 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005689 __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
Vladimir Markoaad75c62016-10-03 08:46:48 +00005690 codegen_->RecordBootStringPatch(load);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005691 return;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005692 }
5693 case HLoadString::LoadKind::kBootImageAddress: {
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00005694 uint32_t address = dchecked_integral_cast<uint32_t>(
5695 reinterpret_cast<uintptr_t>(load->GetString().Get()));
5696 DCHECK_NE(address, 0u);
Colin Cross0bd97172017-03-15 16:33:27 -07005697 __ movl(out, Immediate(static_cast<int32_t>(address))); // Zero-extended.
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005698 return;
5699 }
5700 case HLoadString::LoadKind::kBootImageInternTable: {
5701 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
5702 __ movl(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
5703 codegen_->RecordBootStringPatch(load);
5704 return;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005705 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00005706 case HLoadString::LoadKind::kBssEntry: {
5707 Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
5708 /* no_rip */ false);
5709 Label* fixup_label = codegen_->NewStringBssEntryPatch(load);
5710 // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005711 GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
Vladimir Marko174b2e22017-10-12 13:34:49 +01005712 SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) LoadStringSlowPathX86_64(load);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005713 codegen_->AddSlowPath(slow_path);
5714 __ testl(out, out);
5715 __ j(kEqual, slow_path->GetEntryLabel());
5716 __ Bind(slow_path->GetExitLabel());
5717 return;
5718 }
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005719 case HLoadString::LoadKind::kJitTableAddress: {
5720 Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
5721 /* no_rip */ true);
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00005722 Label* fixup_label = codegen_->NewJitRootStringPatch(
5723 load->GetDexFile(), load->GetStringIndex(), load->GetString());
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005724 // /* GcRoot<mirror::String> */ out = *address
5725 GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
5726 return;
5727 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005728 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07005729 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005730 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005731
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07005732 // TODO: Re-add the compiler code to do string dex cache lookup again.
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005733 // Custom calling convention: RAX serves as both input and output.
Andreas Gampe8a0128a2016-11-28 07:38:35 -08005734 __ movl(CpuRegister(RAX), Immediate(load->GetStringIndex().index_));
Christina Wadsworthabb341b2016-08-31 16:29:44 -07005735 codegen_->InvokeRuntime(kQuickResolveString,
5736 load,
5737 load->GetDexPc());
5738 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005739}
5740
David Brazdilcb1c0552015-08-04 16:22:25 +01005741static Address GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07005742 return Address::Absolute(Thread::ExceptionOffset<kX86_64PointerSize>().Int32Value(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005743 /* no_rip */ true);
David Brazdilcb1c0552015-08-04 16:22:25 +01005744}
5745
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005746void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
5747 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005748 new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005749 locations->SetOut(Location::RequiresRegister());
5750}
5751
5752void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
David Brazdilcb1c0552015-08-04 16:22:25 +01005753 __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
5754}
5755
void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
  // Clearing the exception needs no inputs, no output and no runtime call.
  new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
}
5759
void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  // Store null (0) into the thread-local pending-exception slot (via GS).
  __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
}
5763
5764void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005765 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5766 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005767 InvokeRuntimeCallingConvention calling_convention;
5768 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5769}
5770
void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
  // Delegate exception delivery entirely to the runtime entry point.
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
5775
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00005776static bool CheckCastTypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
Vladimir Markoe619f6c2017-12-12 16:00:01 +00005777 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07005778 // We need a temporary for holding the iftable length.
5779 return true;
5780 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005781 return kEmitCompilerReadBarrier &&
Vladimir Marko953437b2016-08-24 08:30:46 +00005782 !kUseBakerReadBarrier &&
5783 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005784 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5785 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5786}
5787
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00005788static bool InstanceOfTypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5789 return kEmitCompilerReadBarrier &&
5790 !kUseBakerReadBarrier &&
5791 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5792 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5793 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5794}
5795
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005796void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005797 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain0d5a2812015-11-13 10:07:31 +00005798 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01005799 bool baker_read_barrier_slow_path = false;
Roland Levillain0d5a2812015-11-13 10:07:31 +00005800 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005801 case TypeCheckKind::kExactCheck:
5802 case TypeCheckKind::kAbstractClassCheck:
5803 case TypeCheckKind::kClassHierarchyCheck:
Vladimir Marko87584542017-12-12 17:47:52 +00005804 case TypeCheckKind::kArrayObjectCheck: {
5805 bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
5806 call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
5807 baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005808 break;
Vladimir Marko87584542017-12-12 17:47:52 +00005809 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005810 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005811 case TypeCheckKind::kUnresolvedCheck:
5812 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005813 call_kind = LocationSummary::kCallOnSlowPath;
5814 break;
5815 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005816
Vladimir Markoca6fff82017-10-03 14:49:14 +01005817 LocationSummary* locations =
5818 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01005819 if (baker_read_barrier_slow_path) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005820 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01005821 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005822 locations->SetInAt(0, Location::RequiresRegister());
5823 locations->SetInAt(1, Location::Any());
5824 // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
5825 locations->SetOut(Location::RequiresRegister());
5826 // When read barriers are enabled, we need a temporary register for
5827 // some cases.
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00005828 if (InstanceOfTypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005829 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005830 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005831}
5832
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005833void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005834 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005835 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005836 Location obj_loc = locations->InAt(0);
5837 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005838 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005839 Location out_loc = locations->Out();
5840 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00005841 Location maybe_temp_loc = InstanceOfTypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005842 locations->GetTemp(0) :
5843 Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005844 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005845 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5846 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5847 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07005848 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005849 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005850
5851 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005852 // Avoid null check if we know obj is not null.
5853 if (instruction->MustDoNullCheck()) {
5854 __ testl(obj, obj);
5855 __ j(kEqual, &zero);
5856 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005857
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005858 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005859 case TypeCheckKind::kExactCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00005860 ReadBarrierOption read_barrier_option =
5861 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08005862 // /* HeapReference<Class> */ out = obj->klass_
5863 GenerateReferenceLoadTwoRegisters(instruction,
5864 out_loc,
5865 obj_loc,
5866 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00005867 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005868 if (cls.IsRegister()) {
5869 __ cmpl(out, cls.AsRegister<CpuRegister>());
5870 } else {
5871 DCHECK(cls.IsStackSlot()) << cls;
5872 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5873 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005874 if (zero.IsLinked()) {
5875 // Classes must be equal for the instanceof to succeed.
5876 __ j(kNotEqual, &zero);
5877 __ movl(out, Immediate(1));
5878 __ jmp(&done);
5879 } else {
5880 __ setcc(kEqual, out);
5881 // setcc only sets the low byte.
5882 __ andl(out, Immediate(1));
5883 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005884 break;
5885 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005886
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005887 case TypeCheckKind::kAbstractClassCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00005888 ReadBarrierOption read_barrier_option =
5889 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08005890 // /* HeapReference<Class> */ out = obj->klass_
5891 GenerateReferenceLoadTwoRegisters(instruction,
5892 out_loc,
5893 obj_loc,
5894 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00005895 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005896 // If the class is abstract, we eagerly fetch the super class of the
5897 // object to avoid doing a comparison we know will fail.
5898 NearLabel loop, success;
5899 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005900 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08005901 GenerateReferenceLoadOneRegister(instruction,
5902 out_loc,
5903 super_offset,
5904 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00005905 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005906 __ testl(out, out);
5907 // If `out` is null, we use it for the result, and jump to `done`.
5908 __ j(kEqual, &done);
5909 if (cls.IsRegister()) {
5910 __ cmpl(out, cls.AsRegister<CpuRegister>());
5911 } else {
5912 DCHECK(cls.IsStackSlot()) << cls;
5913 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5914 }
5915 __ j(kNotEqual, &loop);
5916 __ movl(out, Immediate(1));
5917 if (zero.IsLinked()) {
5918 __ jmp(&done);
5919 }
5920 break;
5921 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005922
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005923 case TypeCheckKind::kClassHierarchyCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00005924 ReadBarrierOption read_barrier_option =
5925 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08005926 // /* HeapReference<Class> */ out = obj->klass_
5927 GenerateReferenceLoadTwoRegisters(instruction,
5928 out_loc,
5929 obj_loc,
5930 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00005931 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005932 // Walk over the class hierarchy to find a match.
5933 NearLabel loop, success;
5934 __ Bind(&loop);
5935 if (cls.IsRegister()) {
5936 __ cmpl(out, cls.AsRegister<CpuRegister>());
5937 } else {
5938 DCHECK(cls.IsStackSlot()) << cls;
5939 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5940 }
5941 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005942 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08005943 GenerateReferenceLoadOneRegister(instruction,
5944 out_loc,
5945 super_offset,
5946 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00005947 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005948 __ testl(out, out);
5949 __ j(kNotEqual, &loop);
5950 // If `out` is null, we use it for the result, and jump to `done`.
5951 __ jmp(&done);
5952 __ Bind(&success);
5953 __ movl(out, Immediate(1));
5954 if (zero.IsLinked()) {
5955 __ jmp(&done);
5956 }
5957 break;
5958 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005959
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005960 case TypeCheckKind::kArrayObjectCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00005961 ReadBarrierOption read_barrier_option =
5962 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08005963 // /* HeapReference<Class> */ out = obj->klass_
5964 GenerateReferenceLoadTwoRegisters(instruction,
5965 out_loc,
5966 obj_loc,
5967 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00005968 read_barrier_option);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005969 // Do an exact check.
5970 NearLabel exact_check;
5971 if (cls.IsRegister()) {
5972 __ cmpl(out, cls.AsRegister<CpuRegister>());
5973 } else {
5974 DCHECK(cls.IsStackSlot()) << cls;
5975 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5976 }
5977 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005978 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005979 // /* HeapReference<Class> */ out = out->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08005980 GenerateReferenceLoadOneRegister(instruction,
5981 out_loc,
5982 component_offset,
5983 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00005984 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005985 __ testl(out, out);
5986 // If `out` is null, we use it for the result, and jump to `done`.
5987 __ j(kEqual, &done);
5988 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
5989 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005990 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005991 __ movl(out, Immediate(1));
5992 __ jmp(&done);
5993 break;
5994 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005995
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005996 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08005997 // No read barrier since the slow path will retry upon failure.
5998 // /* HeapReference<Class> */ out = obj->klass_
5999 GenerateReferenceLoadTwoRegisters(instruction,
6000 out_loc,
6001 obj_loc,
6002 class_offset,
6003 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006004 if (cls.IsRegister()) {
6005 __ cmpl(out, cls.AsRegister<CpuRegister>());
6006 } else {
6007 DCHECK(cls.IsStackSlot()) << cls;
6008 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
6009 }
6010 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01006011 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86_64(
6012 instruction, /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006013 codegen_->AddSlowPath(slow_path);
6014 __ j(kNotEqual, slow_path->GetEntryLabel());
6015 __ movl(out, Immediate(1));
6016 if (zero.IsLinked()) {
6017 __ jmp(&done);
6018 }
6019 break;
6020 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006021
Calin Juravle98893e12015-10-02 21:05:03 +01006022 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006023 case TypeCheckKind::kInterfaceCheck: {
6024 // Note that we indeed only call on slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006025 // into the slow path for the unresolved and interface check
Roland Levillain0d5a2812015-11-13 10:07:31 +00006026 // cases.
6027 //
6028 // We cannot directly call the InstanceofNonTrivial runtime
6029 // entry point without resorting to a type checking slow path
6030 // here (i.e. by calling InvokeRuntime directly), as it would
6031 // require to assign fixed registers for the inputs of this
6032 // HInstanceOf instruction (following the runtime calling
6033 // convention), which might be cluttered by the potential first
6034 // read barrier emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006035 //
6036 // TODO: Introduce a new runtime entry point taking the object
6037 // to test (instead of its class) as argument, and let it deal
6038 // with the read barrier issues. This will let us refactor this
6039 // case of the `switch` code as it was previously (with a direct
6040 // call to the runtime not using a type checking slow path).
6041 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006042 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01006043 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86_64(
6044 instruction, /* is_fatal */ false);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006045 codegen_->AddSlowPath(slow_path);
6046 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006047 if (zero.IsLinked()) {
6048 __ jmp(&done);
6049 }
6050 break;
6051 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006052 }
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006053
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006054 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006055 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006056 __ xorl(out, out);
6057 }
6058
6059 if (done.IsLinked()) {
6060 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006061 }
6062
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006063 if (slow_path != nullptr) {
6064 __ Bind(slow_path->GetExitLabel());
6065 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006066}
6067
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006068void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006069 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko87584542017-12-12 17:47:52 +00006070 LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
Vladimir Markoca6fff82017-10-03 14:49:14 +01006071 LocationSummary* locations =
6072 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006073 locations->SetInAt(0, Location::RequiresRegister());
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006074 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
6075 // Require a register for the interface check since there is a loop that compares the class to
6076 // a memory address.
6077 locations->SetInAt(1, Location::RequiresRegister());
6078 } else {
6079 locations->SetInAt(1, Location::Any());
6080 }
6081
Roland Levillain0d5a2812015-11-13 10:07:31 +00006082 // Note that TypeCheckSlowPathX86_64 uses this "temp" register too.
6083 locations->AddTemp(Location::RequiresRegister());
6084 // When read barriers are enabled, we need an additional temporary
6085 // register for some cases.
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00006086 if (CheckCastTypeCheckNeedsATemporary(type_check_kind)) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006087 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006088 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006089}
6090
6091void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006092 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006093 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006094 Location obj_loc = locations->InAt(0);
6095 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006096 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006097 Location temp_loc = locations->GetTemp(0);
6098 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00006099 Location maybe_temp2_loc = CheckCastTypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006100 locations->GetTemp(1) :
6101 Location::NoLocation();
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006102 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
6103 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
6104 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
6105 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
6106 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
6107 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006108 const uint32_t object_array_data_offset =
6109 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006110
Vladimir Marko87584542017-12-12 17:47:52 +00006111 bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006112 SlowPathCode* type_check_slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01006113 new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86_64(
6114 instruction, is_type_check_slow_path_fatal);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006115 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006116
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006117
6118 NearLabel done;
6119 // Avoid null check if we know obj is not null.
6120 if (instruction->MustDoNullCheck()) {
6121 __ testl(obj, obj);
6122 __ j(kEqual, &done);
6123 }
6124
Roland Levillain0d5a2812015-11-13 10:07:31 +00006125 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006126 case TypeCheckKind::kExactCheck:
6127 case TypeCheckKind::kArrayCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006128 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006129 GenerateReferenceLoadTwoRegisters(instruction,
6130 temp_loc,
6131 obj_loc,
6132 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006133 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006134 if (cls.IsRegister()) {
6135 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6136 } else {
6137 DCHECK(cls.IsStackSlot()) << cls;
6138 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6139 }
6140 // Jump to slow path for throwing the exception or doing a
6141 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006142 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006143 break;
6144 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006145
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006146 case TypeCheckKind::kAbstractClassCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006147 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006148 GenerateReferenceLoadTwoRegisters(instruction,
6149 temp_loc,
6150 obj_loc,
6151 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006152 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006153 // If the class is abstract, we eagerly fetch the super class of the
6154 // object to avoid doing a comparison we know will fail.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006155 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006156 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006157 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006158 GenerateReferenceLoadOneRegister(instruction,
6159 temp_loc,
6160 super_offset,
6161 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006162 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006163
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006164 // If the class reference currently in `temp` is null, jump to the slow path to throw the
6165 // exception.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006166 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006167 // Otherwise, compare the classes.
6168 __ j(kZero, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006169 if (cls.IsRegister()) {
6170 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6171 } else {
6172 DCHECK(cls.IsStackSlot()) << cls;
6173 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6174 }
6175 __ j(kNotEqual, &loop);
6176 break;
6177 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006178
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006179 case TypeCheckKind::kClassHierarchyCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006180 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006181 GenerateReferenceLoadTwoRegisters(instruction,
6182 temp_loc,
6183 obj_loc,
6184 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006185 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006186 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006187 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006188 __ Bind(&loop);
6189 if (cls.IsRegister()) {
6190 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6191 } else {
6192 DCHECK(cls.IsStackSlot()) << cls;
6193 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6194 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006195 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006196
Roland Levillain0d5a2812015-11-13 10:07:31 +00006197 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006198 GenerateReferenceLoadOneRegister(instruction,
6199 temp_loc,
6200 super_offset,
6201 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006202 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006203
6204 // If the class reference currently in `temp` is not null, jump
6205 // back at the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006206 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006207 __ j(kNotZero, &loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006208 // Otherwise, jump to the slow path to throw the exception.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006209 __ jmp(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006210 break;
6211 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006212
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006213 case TypeCheckKind::kArrayObjectCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006214 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006215 GenerateReferenceLoadTwoRegisters(instruction,
6216 temp_loc,
6217 obj_loc,
6218 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006219 kWithoutReadBarrier);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006220 // Do an exact check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006221 NearLabel check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006222 if (cls.IsRegister()) {
6223 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6224 } else {
6225 DCHECK(cls.IsStackSlot()) << cls;
6226 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6227 }
6228 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006229
6230 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006231 // /* HeapReference<Class> */ temp = temp->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006232 GenerateReferenceLoadOneRegister(instruction,
6233 temp_loc,
6234 component_offset,
6235 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006236 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006237
6238 // If the component type is not null (i.e. the object is indeed
6239 // an array), jump to label `check_non_primitive_component_type`
6240 // to further check that this component type is not a primitive
6241 // type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006242 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006243 // Otherwise, jump to the slow path to throw the exception.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006244 __ j(kZero, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006245 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006246 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006247 break;
6248 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006249
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006250 case TypeCheckKind::kUnresolvedCheck: {
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006251 // We always go into the type check slow path for the unresolved case.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006252 //
6253 // We cannot directly call the CheckCast runtime entry point
6254 // without resorting to a type checking slow path here (i.e. by
6255 // calling InvokeRuntime directly), as it would require to
6256 // assign fixed registers for the inputs of this HInstanceOf
6257 // instruction (following the runtime calling convention), which
6258 // might be cluttered by the potential first read barrier
6259 // emission at the beginning of this method.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006260 __ jmp(type_check_slow_path->GetEntryLabel());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006261 break;
6262 }
6263
6264 case TypeCheckKind::kInterfaceCheck:
Vladimir Markoe619f6c2017-12-12 16:00:01 +00006265 // Fast path for the interface check. Try to avoid read barriers to improve the fast path.
6266 // We can not get false positives by doing this.
6267 // /* HeapReference<Class> */ temp = obj->klass_
6268 GenerateReferenceLoadTwoRegisters(instruction,
6269 temp_loc,
6270 obj_loc,
6271 class_offset,
6272 kWithoutReadBarrier);
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006273
Vladimir Markoe619f6c2017-12-12 16:00:01 +00006274 // /* HeapReference<Class> */ temp = temp->iftable_
6275 GenerateReferenceLoadTwoRegisters(instruction,
6276 temp_loc,
6277 temp_loc,
6278 iftable_offset,
6279 kWithoutReadBarrier);
6280 // Iftable is never null.
6281 __ movl(maybe_temp2_loc.AsRegister<CpuRegister>(), Address(temp, array_length_offset));
6282 // Maybe poison the `cls` for direct comparison with memory.
6283 __ MaybePoisonHeapReference(cls.AsRegister<CpuRegister>());
6284 // Loop through the iftable and check if any class matches.
6285 NearLabel start_loop;
6286 __ Bind(&start_loop);
6287 // Need to subtract first to handle the empty array case.
6288 __ subl(maybe_temp2_loc.AsRegister<CpuRegister>(), Immediate(2));
6289 __ j(kNegative, type_check_slow_path->GetEntryLabel());
6290 // Go to next interface if the classes do not match.
6291 __ cmpl(cls.AsRegister<CpuRegister>(),
6292 CodeGeneratorX86_64::ArrayAddress(temp,
6293 maybe_temp2_loc,
6294 TIMES_4,
6295 object_array_data_offset));
6296 __ j(kNotEqual, &start_loop); // Return if same class.
6297 // If `cls` was poisoned above, unpoison it.
6298 __ MaybeUnpoisonHeapReference(cls.AsRegister<CpuRegister>());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006299 break;
6300 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006301
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006302 if (done.IsLinked()) {
6303 __ Bind(&done);
6304 }
6305
Roland Levillain0d5a2812015-11-13 10:07:31 +00006306 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006307}
6308
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006309void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006310 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6311 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006312 InvokeRuntimeCallingConvention calling_convention;
6313 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6314}
6315
6316void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01006317 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexandre Rames8158f282015-08-07 10:26:17 +01006318 instruction,
Serban Constantinescuba45db02016-07-12 22:53:02 +01006319 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006320 if (instruction->IsEnter()) {
6321 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6322 } else {
6323 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6324 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006325}
6326
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006327void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
6328void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
6329void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
6330
6331void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6332 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006333 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006334 DCHECK(instruction->GetResultType() == DataType::Type::kInt32
6335 || instruction->GetResultType() == DataType::Type::kInt64);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006336 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04006337 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006338 locations->SetOut(Location::SameAsFirstInput());
6339}
6340
// The actual code generation for And/Or/Xor is shared in
// HandleBitwiseOperation; these visitors only dispatch to it.
void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
  HandleBitwiseOperation(instruction);
}
6352
// Emits the x86-64 code for a bitwise HAnd/HOr/HXor. The first input also
// serves as the destination (x86 two-operand form, checked below); the
// second operand may be a register, a constant or a stack slot.
void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  // The builder requested Location::SameAsFirstInput() for the output.
  DCHECK(first.Equals(locations->Out()));

  if (instruction->GetResultType() == DataType::Type::kInt32) {
    if (second.IsRegister()) {
      // 32-bit register-register form.
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      }
    } else if (second.IsConstant()) {
      // 32-bit register-immediate form.
      Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), imm);
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), imm);
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), imm);
      }
    } else {
      // 32-bit register-memory form: the second operand is on the stack.
      Address address(CpuRegister(RSP), second.GetStackIndex());
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), address);
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), address);
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), address);
      }
    }
  } else {
    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
    CpuRegister first_reg = first.AsRegister<CpuRegister>();
    bool second_is_constant = false;
    int64_t value = 0;
    if (second.IsConstant()) {
      second_is_constant = true;
      value = second.GetConstant()->AsLongConstant()->GetValue();
    }
    // 64-bit logic instructions only accept immediates that fit in a signed
    // 32-bit field; wider constants are read from an in-memory literal.
    bool is_int32_value = IsInt<32>(value);

    if (instruction->IsAnd()) {
      if (second_is_constant) {
        if (is_int32_value) {
          __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ andq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ andq(first_reg, second.AsRegister<CpuRegister>());
      }
    } else if (instruction->IsOr()) {
      if (second_is_constant) {
        if (is_int32_value) {
          __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ orq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ orq(first_reg, second.AsRegister<CpuRegister>());
      }
    } else {
      DCHECK(instruction->IsXor());
      if (second_is_constant) {
        if (is_int32_value) {
          __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ xorq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ xorq(first_reg, second.AsRegister<CpuRegister>());
      }
    }
  }
}
6441
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006442void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(
6443 HInstruction* instruction,
6444 Location out,
6445 uint32_t offset,
6446 Location maybe_temp,
6447 ReadBarrierOption read_barrier_option) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006448 CpuRegister out_reg = out.AsRegister<CpuRegister>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006449 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006450 CHECK(kEmitCompilerReadBarrier);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006451 if (kUseBakerReadBarrier) {
6452 // Load with fast path based Baker's read barrier.
6453 // /* HeapReference<Object> */ out = *(out + offset)
6454 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00006455 instruction, out, out_reg, offset, /* needs_null_check */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006456 } else {
6457 // Load with slow path based read barrier.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006458 // Save the value of `out` into `maybe_temp` before overwriting it
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006459 // in the following move operation, as we will need it for the
6460 // read barrier below.
Vladimir Marko953437b2016-08-24 08:30:46 +00006461 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006462 __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006463 // /* HeapReference<Object> */ out = *(out + offset)
6464 __ movl(out_reg, Address(out_reg, offset));
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006465 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006466 }
6467 } else {
6468 // Plain load with no read barrier.
6469 // /* HeapReference<Object> */ out = *(out + offset)
6470 __ movl(out_reg, Address(out_reg, offset));
6471 __ MaybeUnpoisonHeapReference(out_reg);
6472 }
6473}
6474
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006475void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(
6476 HInstruction* instruction,
6477 Location out,
6478 Location obj,
6479 uint32_t offset,
6480 ReadBarrierOption read_barrier_option) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006481 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6482 CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006483 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006484 CHECK(kEmitCompilerReadBarrier);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006485 if (kUseBakerReadBarrier) {
6486 // Load with fast path based Baker's read barrier.
6487 // /* HeapReference<Object> */ out = *(obj + offset)
6488 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00006489 instruction, out, obj_reg, offset, /* needs_null_check */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006490 } else {
6491 // Load with slow path based read barrier.
6492 // /* HeapReference<Object> */ out = *(obj + offset)
6493 __ movl(out_reg, Address(obj_reg, offset));
6494 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
6495 }
6496 } else {
6497 // Plain load with no read barrier.
6498 // /* HeapReference<Object> */ out = *(obj + offset)
6499 __ movl(out_reg, Address(obj_reg, offset));
6500 __ MaybeUnpoisonHeapReference(out_reg);
6501 }
6502}
6503
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006504void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(
6505 HInstruction* instruction,
6506 Location root,
6507 const Address& address,
6508 Label* fixup_label,
6509 ReadBarrierOption read_barrier_option) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006510 CpuRegister root_reg = root.AsRegister<CpuRegister>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006511 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006512 DCHECK(kEmitCompilerReadBarrier);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006513 if (kUseBakerReadBarrier) {
6514 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
6515 // Baker's read barrier are used:
6516 //
Roland Levillaind966ce72017-02-09 16:20:14 +00006517 // root = obj.field;
6518 // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
6519 // if (temp != null) {
6520 // root = temp(root)
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006521 // }
6522
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006523 // /* GcRoot<mirror::Object> */ root = *address
6524 __ movl(root_reg, address);
6525 if (fixup_label != nullptr) {
6526 __ Bind(fixup_label);
6527 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006528 static_assert(
6529 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
6530 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
6531 "have different sizes.");
6532 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
6533 "art::mirror::CompressedReference<mirror::Object> and int32_t "
6534 "have different sizes.");
6535
Vladimir Marko953437b2016-08-24 08:30:46 +00006536 // Slow path marking the GC root `root`.
Vladimir Marko174b2e22017-10-12 13:34:49 +01006537 SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) ReadBarrierMarkSlowPathX86_64(
Roland Levillaina1aa3b12016-10-26 13:03:38 +01006538 instruction, root, /* unpoison_ref_before_marking */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006539 codegen_->AddSlowPath(slow_path);
6540
Roland Levillaind966ce72017-02-09 16:20:14 +00006541 // Test the `Thread::Current()->pReadBarrierMarkReg ## root.reg()` entrypoint.
6542 const int32_t entry_point_offset =
Roland Levillain97c46462017-05-11 14:04:03 +01006543 Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(root.reg());
Roland Levillaind966ce72017-02-09 16:20:14 +00006544 __ gs()->cmpl(Address::Absolute(entry_point_offset, /* no_rip */ true), Immediate(0));
6545 // The entrypoint is null when the GC is not marking.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006546 __ j(kNotEqual, slow_path->GetEntryLabel());
6547 __ Bind(slow_path->GetExitLabel());
6548 } else {
6549 // GC root loaded through a slow path for read barriers other
6550 // than Baker's.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006551 // /* GcRoot<mirror::Object>* */ root = address
6552 __ leaq(root_reg, address);
6553 if (fixup_label != nullptr) {
6554 __ Bind(fixup_label);
6555 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006556 // /* mirror::Object* */ root = root->Read()
6557 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
6558 }
6559 } else {
6560 // Plain GC root load with no read barrier.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006561 // /* GcRoot<mirror::Object> */ root = *address
6562 __ movl(root_reg, address);
6563 if (fixup_label != nullptr) {
6564 __ Bind(fixup_label);
6565 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006566 // Note that GC roots are not affected by heap poisoning, thus we
6567 // do not have to unpoison `root_reg` here.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006568 }
6569}
6570
6571void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6572 Location ref,
6573 CpuRegister obj,
6574 uint32_t offset,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006575 bool needs_null_check) {
6576 DCHECK(kEmitCompilerReadBarrier);
6577 DCHECK(kUseBakerReadBarrier);
6578
6579 // /* HeapReference<Object> */ ref = *(obj + offset)
6580 Address src(obj, offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00006581 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006582}
6583
6584void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6585 Location ref,
6586 CpuRegister obj,
6587 uint32_t data_offset,
6588 Location index,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006589 bool needs_null_check) {
6590 DCHECK(kEmitCompilerReadBarrier);
6591 DCHECK(kUseBakerReadBarrier);
6592
Roland Levillain3d312422016-06-23 13:53:42 +01006593 static_assert(
6594 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
6595 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006596 // /* HeapReference<Object> */ ref =
6597 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006598 Address src = CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00006599 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006600}
6601
6602void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
6603 Location ref,
6604 CpuRegister obj,
6605 const Address& src,
Roland Levillaina1aa3b12016-10-26 13:03:38 +01006606 bool needs_null_check,
6607 bool always_update_field,
6608 CpuRegister* temp1,
6609 CpuRegister* temp2) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006610 DCHECK(kEmitCompilerReadBarrier);
6611 DCHECK(kUseBakerReadBarrier);
6612
6613 // In slow path based read barriers, the read barrier call is
6614 // inserted after the original load. However, in fast path based
6615 // Baker's read barriers, we need to perform the load of
6616 // mirror::Object::monitor_ *before* the original reference load.
6617 // This load-load ordering is required by the read barrier.
6618 // The fast path/slow path (for Baker's algorithm) should look like:
6619 //
6620 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
6621 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
6622 // HeapReference<Object> ref = *src; // Original reference load.
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07006623 // bool is_gray = (rb_state == ReadBarrier::GrayState());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006624 // if (is_gray) {
6625 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
6626 // }
6627 //
6628 // Note: the original implementation in ReadBarrier::Barrier is
6629 // slightly more complex as:
6630 // - it implements the load-load fence using a data dependency on
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006631 // the high-bits of rb_state, which are expected to be all zeroes
6632 // (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
6633 // here, which is a no-op thanks to the x86-64 memory model);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006634 // - it performs additional checks that we do not do here for
6635 // performance reasons.
6636
6637 CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006638 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
6639
Vladimir Marko953437b2016-08-24 08:30:46 +00006640 // Given the numeric representation, it's enough to check the low bit of the rb_state.
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07006641 static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
6642 static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
Vladimir Marko953437b2016-08-24 08:30:46 +00006643 constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
6644 constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
6645 constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);
6646
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07006647 // if (rb_state == ReadBarrier::GrayState())
Vladimir Marko953437b2016-08-24 08:30:46 +00006648 // ref = ReadBarrier::Mark(ref);
6649 // At this point, just do the "if" and make sure that flags are preserved until the branch.
6650 __ testb(Address(obj, monitor_offset + gray_byte_position), Immediate(test_value));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006651 if (needs_null_check) {
6652 MaybeRecordImplicitNullCheck(instruction);
6653 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006654
6655 // Load fence to prevent load-load reordering.
6656 // Note that this is a no-op, thanks to the x86-64 memory model.
6657 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
6658
6659 // The actual reference load.
6660 // /* HeapReference<Object> */ ref = *src
Vladimir Marko953437b2016-08-24 08:30:46 +00006661 __ movl(ref_reg, src); // Flags are unaffected.
6662
6663 // Note: Reference unpoisoning modifies the flags, so we need to delay it after the branch.
6664 // Slow path marking the object `ref` when it is gray.
Roland Levillaina1aa3b12016-10-26 13:03:38 +01006665 SlowPathCode* slow_path;
6666 if (always_update_field) {
6667 DCHECK(temp1 != nullptr);
6668 DCHECK(temp2 != nullptr);
Vladimir Marko174b2e22017-10-12 13:34:49 +01006669 slow_path = new (GetScopedAllocator()) ReadBarrierMarkAndUpdateFieldSlowPathX86_64(
Roland Levillaina1aa3b12016-10-26 13:03:38 +01006670 instruction, ref, obj, src, /* unpoison_ref_before_marking */ true, *temp1, *temp2);
6671 } else {
Vladimir Marko174b2e22017-10-12 13:34:49 +01006672 slow_path = new (GetScopedAllocator()) ReadBarrierMarkSlowPathX86_64(
Roland Levillaina1aa3b12016-10-26 13:03:38 +01006673 instruction, ref, /* unpoison_ref_before_marking */ true);
6674 }
Vladimir Marko953437b2016-08-24 08:30:46 +00006675 AddSlowPath(slow_path);
6676
6677 // We have done the "if" of the gray bit check above, now branch based on the flags.
6678 __ j(kNotZero, slow_path->GetEntryLabel());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006679
6680 // Object* ref = ref_addr->AsMirrorPtr()
6681 __ MaybeUnpoisonHeapReference(ref_reg);
6682
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006683 __ Bind(slow_path->GetExitLabel());
6684}
6685
// Emits an unconditional slow-path read barrier for the heap reference `ref`
// loaded from `obj` at `offset` (plus `index` for array accesses); the
// barrier's result is produced in `out` by the slow path.
void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
                                                  Location out,
                                                  Location ref,
                                                  Location obj,
                                                  uint32_t offset,
                                                  Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCode* slow_path = new (GetScopedAllocator())
      ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

  // Always take the slow path; execution continues at the exit label bound below.
  __ jmp(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
6712
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006713void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6714 Location out,
6715 Location ref,
6716 Location obj,
6717 uint32_t offset,
6718 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006719 if (kEmitCompilerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006720 // Baker's read barriers shall be handled by the fast path
6721 // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
6722 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006723 // If heap poisoning is enabled, unpoisoning will be taken care of
6724 // by the runtime within the slow path.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006725 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006726 } else if (kPoisonHeapReferences) {
6727 __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
6728 }
6729}
6730
// Emits an unconditional slow-path read barrier for a GC root load; `out`
// holds the loaded root and `root` describes its location.
void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                         Location out,
                                                         Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCode* slow_path =
      new (GetScopedAllocator()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
  AddSlowPath(slow_path);

  // Always take the slow path; execution continues at the exit label bound below.
  __ jmp(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
6747
// HBoundType carries type information only and is expected to be stripped
// before this pass; reaching this visitor indicates a compiler bug.
void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}
6752
// HBoundType generates no code; it is expected to be stripped before code
// generation, so reaching this visitor indicates a compiler bug.
void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}
6757
// Simple implementation of packed switch - generate cascaded compare/jumps.
// Register setup for HPackedSwitch: the switch value in a register plus two
// temporaries used by the jump-table code path (offset/value scratch and the
// table base address — see the matching visitor below).
void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}
6766
// Generates code for HPackedSwitch: a cascade of compare/jump pairs when the
// switch is small (<= kPackedSwitchJumpTableThreshold entries), otherwise a
// RIP-relative jump table placed in the constant area.
void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Should we generate smaller inline compare/jumps?
  if (num_entries <= kPackedSwitchJumpTableThreshold) {
    // Figure out the correct compare values and jump conditions.
    // Handle the first compare/branch as a special case because it might
    // jump to the default case.
    DCHECK_GT(num_entries, 2u);
    Condition first_condition;
    uint32_t index;
    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    if (lower_bound != 0) {
      // Signed compare: anything below the first case value is the default.
      first_condition = kLess;
      __ cmpl(value_reg_in, Immediate(lower_bound));
      __ j(first_condition, codegen_->GetLabelOf(default_block));
      __ j(kEqual, codegen_->GetLabelOf(successors[0]));

      index = 1;
    } else {
      // Handle all the compare/jumps below.
      // With a zero lower bound an unsigned "below" also catches negative
      // values, so no separate first compare is needed.
      first_condition = kBelow;
      index = 0;
    }

    // Handle the rest of the compare/jumps.
    // Each compare settles two adjacent cases: below => index, equal => index + 1.
    for (; index + 1 < num_entries; index += 2) {
      int32_t compare_to_value = lower_bound + index + 1;
      __ cmpl(value_reg_in, Immediate(compare_to_value));
      // Jump to successors[index] if value < case_value[index].
      __ j(first_condition, codegen_->GetLabelOf(successors[index]));
      // Jump to successors[index + 1] if value == case_value[index + 1].
      __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
    }

    if (index != num_entries) {
      // There are an odd number of entries. Handle the last one.
      DCHECK_EQ(index + 1, num_entries);
      __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
      __ j(kEqual, codegen_->GetLabelOf(successors[index]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ jmp(codegen_->GetLabelOf(default_block));
    }
    return;
  }

  // Jump-table path.
  // Remove the bias, if needed.
  Register value_reg_out = value_reg_in.AsRegister();
  if (lower_bound != 0) {
    __ leal(temp_reg, Address(value_reg_in, -lower_bound));
    value_reg_out = temp_reg.AsRegister();
  }
  CpuRegister value_reg(value_reg_out);

  // Is the value in range?
  // kAbove is an unsigned comparison, so negative unbiased values also go to
  // the default block.
  __ cmpl(value_reg, Immediate(num_entries - 1));
  __ j(kAbove, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the address of the jump table in the constant area.
  __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));

  // Load the (signed) offset from the jump table.
  __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));

  // Add the offset to the address of the table base.
  __ addq(temp_reg, base_reg);

  // And jump.
  __ jmp(temp_reg);
}
6847
// HIntermediateAddress is never expected on x86-64; reaching this visitor
// indicates a compiler bug.
void LocationsBuilderX86_64::VisitIntermediateAddress(HIntermediateAddress* instruction
                                                      ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}
6852
// HIntermediateAddress is never expected on x86-64; reaching this visitor
// indicates a compiler bug.
void InstructionCodeGeneratorX86_64::VisitIntermediateAddress(HIntermediateAddress* instruction
                                                              ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}
6857
Aart Bikc5d47542016-01-27 17:00:35 -08006858void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
6859 if (value == 0) {
6860 __ xorl(dest, dest);
6861 } else {
6862 __ movl(dest, Immediate(value));
6863 }
6864}
6865
Mark Mendell92e83bf2015-05-07 11:25:03 -04006866void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
6867 if (value == 0) {
Aart Bikc5d47542016-01-27 17:00:35 -08006868 // Clears upper bits too.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006869 __ xorl(dest, dest);
Vladimir Markoed009782016-02-22 16:54:39 +00006870 } else if (IsUint<32>(value)) {
6871 // We can use a 32 bit move, as it will zero-extend and is shorter.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006872 __ movl(dest, Immediate(static_cast<int32_t>(value)));
6873 } else {
6874 __ movq(dest, Immediate(value));
6875 }
6876}
6877
Mark Mendell7c0b44f2016-02-01 10:08:35 -05006878void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
6879 if (value == 0) {
6880 __ xorps(dest, dest);
6881 } else {
6882 __ movss(dest, LiteralInt32Address(value));
6883 }
6884}
6885
6886void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
6887 if (value == 0) {
6888 __ xorpd(dest, dest);
6889 } else {
6890 __ movsd(dest, LiteralInt64Address(value));
6891 }
6892}
6893
6894void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
6895 Load32BitValue(dest, bit_cast<int32_t, float>(value));
6896}
6897
6898void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
6899 Load64BitValue(dest, bit_cast<int64_t, double>(value));
6900}
6901
Aart Bika19616e2016-02-01 18:57:58 -08006902void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
6903 if (value == 0) {
6904 __ testl(dest, dest);
6905 } else {
6906 __ cmpl(dest, Immediate(value));
6907 }
6908}
6909
6910void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
6911 if (IsInt<32>(value)) {
6912 if (value == 0) {
6913 __ testq(dest, dest);
6914 } else {
6915 __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
6916 }
6917 } else {
6918 // Value won't fit in an int.
6919 __ cmpq(dest, LiteralInt64Address(value));
6920 }
6921}
6922
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006923void CodeGeneratorX86_64::GenerateIntCompare(Location lhs, Location rhs) {
6924 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
jessicahandojo4877b792016-09-08 19:49:13 -07006925 GenerateIntCompare(lhs_reg, rhs);
6926}
6927
6928void CodeGeneratorX86_64::GenerateIntCompare(CpuRegister lhs, Location rhs) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006929 if (rhs.IsConstant()) {
6930 int32_t value = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
jessicahandojo4877b792016-09-08 19:49:13 -07006931 Compare32BitValue(lhs, value);
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006932 } else if (rhs.IsStackSlot()) {
jessicahandojo4877b792016-09-08 19:49:13 -07006933 __ cmpl(lhs, Address(CpuRegister(RSP), rhs.GetStackIndex()));
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006934 } else {
jessicahandojo4877b792016-09-08 19:49:13 -07006935 __ cmpl(lhs, rhs.AsRegister<CpuRegister>());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006936 }
6937}
6938
6939void CodeGeneratorX86_64::GenerateLongCompare(Location lhs, Location rhs) {
6940 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
6941 if (rhs.IsConstant()) {
6942 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
6943 Compare64BitValue(lhs_reg, value);
6944 } else if (rhs.IsDoubleStackSlot()) {
6945 __ cmpq(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
6946 } else {
6947 __ cmpq(lhs_reg, rhs.AsRegister<CpuRegister>());
6948 }
6949}
6950
6951Address CodeGeneratorX86_64::ArrayAddress(CpuRegister obj,
6952 Location index,
6953 ScaleFactor scale,
6954 uint32_t data_offset) {
6955 return index.IsConstant() ?
6956 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << scale) + data_offset) :
6957 Address(obj, index.AsRegister<CpuRegister>(), scale, data_offset);
6958}
6959
Mark Mendellcfa410b2015-05-25 16:02:44 -04006960void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
6961 DCHECK(dest.IsDoubleStackSlot());
6962 if (IsInt<32>(value)) {
6963 // Can move directly as an int32 constant.
6964 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
6965 Immediate(static_cast<int32_t>(value)));
6966 } else {
6967 Load64BitValue(CpuRegister(TMP), value);
6968 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
6969 }
6970}
6971
/**
 * Class to handle late fixup of offsets into constant area.
 *
 * The assembler records the fixup at a RIP-relative use site; Process() later
 * rewrites the 32-bit displacement once the constant area's final position is
 * known.
 */
class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
 public:
  RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
      : codegen_(&codegen), offset_into_constant_area_(offset) {}

 protected:
  // Lets subclasses (e.g. jump-table fixups) supply the offset after
  // construction, once the constant-area layout is decided.
  void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }

  CodeGeneratorX86_64* codegen_;

 private:
  void Process(const MemoryRegion& region, int pos) OVERRIDE {
    // Patch the correct offset for the instruction. We use the address of the
    // 'next' instruction, which is 'pos' (patch the 4 bytes before).
    int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
    int32_t relative_position = constant_offset - pos;

    // Patch in the right value.
    region.StoreUnaligned<int32_t>(pos - 4, relative_position);
  }

  // Location in constant area that the fixup refers to.
  size_t offset_into_constant_area_;
};
6999
/**
 * Class to handle late fixup of offsets to a jump table that will be created in the
 * constant area.
 */
class JumpTableRIPFixup : public RIPFixup {
 public:
  // The constant-area offset is not known at construction time; the -1
  // placeholder is replaced via SetOffset() when CreateJumpTable() runs.
  JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
      : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}

  // Appends the jump table — one 32-bit code offset per switch successor — to
  // the assembler's constant area. All successor labels must be bound.
  void CreateJumpTable() {
    X86_64Assembler* assembler = codegen_->GetAssembler();

    // Ensure that the reference to the jump table has the correct offset.
    const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
    SetOffset(offset_in_constant_table);

    // Compute the offset from the start of the function to this jump table.
    const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;

    // Populate the jump table with the correct values for the jump table.
    int32_t num_entries = switch_instr_->GetNumEntries();
    HBasicBlock* block = switch_instr_->GetBlock();
    const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
    // The value that we want is the target offset - the position of the table.
    for (int32_t i = 0; i < num_entries; i++) {
      HBasicBlock* b = successors[i];
      Label* l = codegen_->GetLabelOf(b);
      DCHECK(l->IsBound());
      int32_t offset_to_block = l->Position() - current_table_offset;
      assembler->AppendInt32(offset_to_block);
    }
  }

 private:
  // The switch instruction this jump table belongs to.
  const HPackedSwitch* switch_instr_;
};
7036
// Finalizes code generation: emits the constant area (populating any pending
// jump tables first) after the generated code, then delegates to
// CodeGenerator::Finalize.
void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
  // Generate the constant area if needed.
  X86_64Assembler* assembler = GetAssembler();
  if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
    // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8 byte values.
    assembler->Align(4, 0);
    constant_area_start_ = assembler->CodeSize();

    // Populate any jump tables.
    for (JumpTableRIPFixup* jump_table : fixups_to_jump_tables_) {
      jump_table->CreateJumpTable();
    }

    // And now add the constant area to the generated code.
    assembler->AddConstantArea();
  }

  // And finish up.
  CodeGenerator::Finalize(allocator);
}
7057
Mark Mendellf55c3e02015-03-26 21:07:46 -04007058Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007059 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddDouble(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007060 return Address::RIP(fixup);
7061}
7062
7063Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007064 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddFloat(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007065 return Address::RIP(fixup);
7066}
7067
7068Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007069 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddInt32(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007070 return Address::RIP(fixup);
7071}
7072
7073Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007074 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddInt64(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007075 return Address::RIP(fixup);
7076}
7077
Andreas Gampe85b62f22015-09-09 13:15:38 -07007078// TODO: trg as memory.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007079void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, DataType::Type type) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07007080 if (!trg.IsValid()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007081 DCHECK_EQ(type, DataType::Type::kVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07007082 return;
7083 }
7084
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007085 DCHECK_NE(type, DataType::Type::kVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07007086
7087 Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
7088 if (trg.Equals(return_loc)) {
7089 return;
7090 }
7091
7092 // Let the parallel move resolver take care of all of this.
Vladimir Markoca6fff82017-10-03 14:49:14 +01007093 HParallelMove parallel_move(GetGraph()->GetAllocator());
Andreas Gampe85b62f22015-09-09 13:15:38 -07007094 parallel_move.AddMove(return_loc, trg, type, nullptr);
7095 GetMoveResolver()->EmitNativeCode(&parallel_move);
7096}
7097
Mark Mendell9c86b482015-09-18 13:36:07 -04007098Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
7099 // Create a fixup to be used to create and address the jump table.
7100 JumpTableRIPFixup* table_fixup =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007101 new (GetGraph()->GetAllocator()) JumpTableRIPFixup(*this, switch_instr);
Mark Mendell9c86b482015-09-18 13:36:07 -04007102
7103 // We have to populate the jump tables.
7104 fixups_to_jump_tables_.push_back(table_fixup);
7105 return Address::RIP(table_fixup);
7106}
7107
Mark Mendellea5af682015-10-22 17:35:49 -04007108void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
7109 const Address& addr_high,
7110 int64_t v,
7111 HInstruction* instruction) {
7112 if (IsInt<32>(v)) {
7113 int32_t v_32 = v;
7114 __ movq(addr_low, Immediate(v_32));
7115 MaybeRecordImplicitNullCheck(instruction);
7116 } else {
7117 // Didn't fit in a register. Do it in pieces.
7118 int32_t low_v = Low32Bits(v);
7119 int32_t high_v = High32Bits(v);
7120 __ movl(addr_low, Immediate(low_v));
7121 MaybeRecordImplicitNullCheck(instruction);
7122 __ movl(addr_high, Immediate(high_v));
7123 }
7124}
7125
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007126void CodeGeneratorX86_64::PatchJitRootUse(uint8_t* code,
7127 const uint8_t* roots_data,
7128 const PatchInfo<Label>& info,
7129 uint64_t index_in_table) const {
7130 uint32_t code_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
7131 uintptr_t address =
7132 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
7133 typedef __attribute__((__aligned__(1))) uint32_t unaligned_uint32_t;
7134 reinterpret_cast<unaligned_uint32_t*>(code + code_offset)[0] =
7135 dchecked_integral_cast<uint32_t>(address);
7136}
7137
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007138void CodeGeneratorX86_64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
7139 for (const PatchInfo<Label>& info : jit_string_patches_) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01007140 StringReference string_reference(&info.dex_file, dex::StringIndex(info.index));
7141 uint64_t index_in_table = GetJitStringRootIndex(string_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01007142 PatchJitRootUse(code, roots_data, info, index_in_table);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007143 }
7144
7145 for (const PatchInfo<Label>& info : jit_class_patches_) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01007146 TypeReference type_reference(&info.dex_file, dex::TypeIndex(info.index));
7147 uint64_t index_in_table = GetJitClassRootIndex(type_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01007148 PatchJitRootUse(code, roots_data, info, index_in_table);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007149 }
7150}
7151
Roland Levillain4d027112015-07-01 15:41:14 +01007152#undef __
7153
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01007154} // namespace x86_64
7155} // namespace art