blob: d79c2e49113d61223e2b825f1bd11f06140de525 [file] [log] [blame]
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86_64.h"
18
Vladimir Marko86c87522020-05-11 16:55:55 +010019#include "arch/x86_64/jni_frame_x86_64.h"
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +000020#include "art_method-inl.h"
Vladimir Marko94ec2db2017-09-06 17:21:03 +010021#include "class_table.h"
Guillaume Sanchez0f88e872015-03-30 17:55:45 +010022#include "code_generator_utils.h"
Vladimir Marko58155012015-08-19 12:49:41 +000023#include "compiled_method.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010024#include "entrypoints/quick/quick_entrypoints.h"
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +010025#include "gc/accounting/card_table.h"
Vladimir Markoeebb8212018-06-05 14:57:24 +010026#include "gc/space/image_space.h"
Andreas Gampe09659c22017-09-18 18:23:32 -070027#include "heap_poisoning.h"
Nicolas Geoffray4313ccb2020-08-26 17:01:15 +010028#include "interpreter/mterp/nterp.h"
Andreas Gampe71fb52f2014-12-29 17:43:08 -080029#include "intrinsics.h"
30#include "intrinsics_x86_64.h"
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +000031#include "jit/profiling_info.h"
Vladimir Markod8dbc8d2017-09-20 13:37:47 +010032#include "linker/linker_patch.h"
Andreas Gamped4901292017-05-30 18:41:34 -070033#include "lock_word.h"
Ian Rogers7e70b002014-10-08 11:47:24 -070034#include "mirror/array-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070035#include "mirror/class-inl.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010036#include "mirror/object_reference.h"
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +000037#include "scoped_thread_state_change-inl.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010038#include "thread.h"
39#include "utils/assembler.h"
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010040#include "utils/stack_checks.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010041#include "utils/x86_64/assembler_x86_64.h"
42#include "utils/x86_64/managed_register_x86_64.h"
43
Vladimir Marko0a516052019-10-14 13:00:44 +000044namespace art {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010045
Roland Levillain0d5a2812015-11-13 10:07:31 +000046template<class MirrorType>
47class GcRoot;
48
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010049namespace x86_64 {
50
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010051static constexpr int kCurrentMethodStackOffset = 0;
Nicolas Geoffray76b1e172015-05-27 17:18:33 +010052static constexpr Register kMethodRegisterArgument = RDI;
Vladimir Markof3e0ee22015-12-17 15:23:13 +000053// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
54// table version generates 7 instructions and num_entries literals. Compare/jump sequence will
55// generates less code/data with a small num_entries.
56static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010057
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +000058static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +000059static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +010060
Mark Mendell24f2dfa2015-01-14 19:51:45 -050061static constexpr int kC2ConditionMask = 0x400;
62
Vladimir Marko3232dbb2018-07-25 15:42:46 +010063static RegisterSet OneRegInReferenceOutSaveEverythingCallerSaves() {
64 // Custom calling convention: RAX serves as both input and output.
65 RegisterSet caller_saves = RegisterSet::Empty();
66 caller_saves.Add(Location::RegisterLocation(RAX));
67 return caller_saves;
68}
69
Roland Levillain7cbd27f2016-08-11 23:53:33 +010070// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
71#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -070072#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64PointerSize, x).Int32Value()
Nicolas Geoffraye5038322014-07-04 09:41:32 +010073
// Slow path that calls the kQuickThrowNullPointer runtime entrypoint to throw a
// NullPointerException. Fatal: the runtime call does not return to compiled code,
// so no exit jump is emitted.
class NullCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "NullCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
};
99
// Slow path that calls the kQuickThrowDivZero runtime entrypoint to throw an
// ArithmeticException for division by zero. Fatal: the runtime call does not
// return to compiled code.
class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    x86_64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "DivZeroCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
};
118
// Slow path for integer div/rem when the divisor is -1. This computes the result
// without the idiv instruction: x / -1 == -x and x % -1 == 0. Presumably this
// exists because idiv raises #DE when the quotient overflows (kIntMin / -1) —
// see the Intel SDM; confirm against the fast-path emission site.
class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
 public:
  DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, DataType::Type type, bool is_div)
      : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    __ Bind(GetEntryLabel());
    if (type_ == DataType::Type::kInt32) {
      if (is_div_) {
        // Quotient: x / -1 == -x.
        __ negl(cpu_reg_);
      } else {
        // Remainder: x % -1 == 0.
        __ xorl(cpu_reg_, cpu_reg_);
      }

    } else {
      DCHECK_EQ(DataType::Type::kInt64, type_);
      if (is_div_) {
        __ negq(cpu_reg_);
      } else {
        // 32-bit xor suffices: on x86-64, 32-bit operations zero-extend
        // the result into the full 64-bit register.
        __ xorl(cpu_reg_, cpu_reg_);
      }
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "DivRemMinusOneSlowPathX86_64"; }

 private:
  const CpuRegister cpu_reg_;   // Register holding the dividend on entry, the result on exit.
  const DataType::Type type_;   // kInt32 or kInt64.
  const bool is_div_;           // true: compute quotient; false: compute remainder.
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
};
152
// Slow path that calls the kQuickTestSuspend runtime entrypoint, then resumes
// execution either at `successor_` (a loop-back-edge style check) or at
// `return_label_` when no successor block was supplied.
class SuspendCheckSlowPathX86_64 : public SlowPathCode {
 public:
  SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCode(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);  // Only saves full width XMM for SIMD.
    x86_64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, locations);  // Only restores full width XMM for SIMD.
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(x86_64_codegen->GetLabelOf(successor_));
    }
  }

  // Valid only when there is no explicit successor block.
  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const override { return "SuspendCheckSlowPathX86_64"; }

 private:
  // Block to jump to after the runtime call, or null to use `return_label_`.
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
};
190
// Slow path throwing ArrayIndexOutOfBoundsException (or StringIndexOutOfBoundsException
// for String.charAt) via the quick runtime. Handles the case where the array length
// was never materialized in a register because the HArrayLength was emitted at its
// use site. Fatal: the runtime call does not return to compiled code.
class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // Are we using an array length from memory?
    HInstruction* array_length = instruction_->InputAt(1);
    Location length_loc = locations->InAt(1);
    InvokeRuntimeCallingConvention calling_convention;
    if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
      // Load the array length into our temporary.
      HArrayLength* length = array_length->AsArrayLength();
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(length);
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
      // Check for conflicts with index.
      if (length_loc.Equals(locations->InAt(0))) {
        // We know we aren't using parameter 2.
        length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
      }
      __ movl(length_loc.AsRegister<CpuRegister>(), array_len);
      if (mirror::kUseStringCompression && length->IsStringLength()) {
        // Drop the low bit — presumably the compression flag packed with the
        // length when string compression is enabled; confirm against
        // mirror::String's count-field encoding.
        __ shrl(length_loc.AsRegister<CpuRegister>(), Immediate(1));
      }
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        DataType::Type::kInt32,
        length_loc,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    x86_64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "BoundsCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};
250
// Slow path for HLoadClass / HClinitCheck: resolves the type (optionally with an
// access check) and/or initializes its static storage through the quick runtime,
// then moves the resulting class reference to the instruction's output location.
class LoadClassSlowPathX86_64 : public SlowPathCode {
 public:
  // `at` is the instruction this slow path belongs to (the HLoadClass itself,
  // or an HClinitCheck whose input is `cls`).
  LoadClassSlowPathX86_64(HLoadClass* cls, HInstruction* at)
      : SlowPathCode(at), cls_(cls) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    const uint32_t dex_pc = instruction_->GetDexPc();
    bool must_resolve_type = instruction_->IsLoadClass() && cls_->MustResolveTypeOnSlowPath();
    bool must_do_clinit = instruction_->IsClinitCheck() || cls_->MustGenerateClinitCheck();

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Custom calling convention: RAX serves as both input and output.
    if (must_resolve_type) {
      DCHECK(IsSameDexFile(cls_->GetDexFile(), x86_64_codegen->GetGraph()->GetDexFile()));
      dex::TypeIndex type_index = cls_->GetTypeIndex();
      __ movl(CpuRegister(RAX), Immediate(type_index.index_));
      if (cls_->NeedsAccessCheck()) {
        CheckEntrypointTypes<kQuickResolveTypeAndVerifyAccess, void*, uint32_t>();
        x86_64_codegen->InvokeRuntime(kQuickResolveTypeAndVerifyAccess, instruction_, dex_pc, this);
      } else {
        CheckEntrypointTypes<kQuickResolveType, void*, uint32_t>();
        x86_64_codegen->InvokeRuntime(kQuickResolveType, instruction_, dex_pc, this);
      }
      // If we also must_do_clinit, the resolved type is now in the correct register.
    } else {
      DCHECK(must_do_clinit);
      // No resolution needed: fetch the already-resolved class from the
      // instruction's output (HLoadClass) or input (HClinitCheck).
      Location source = instruction_->IsLoadClass() ? out : locations->InAt(0);
      x86_64_codegen->Move(Location::RegisterLocation(RAX), source);
    }
    if (must_do_clinit) {
      x86_64_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, mirror::Class*>();
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
    }

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "LoadClassSlowPathX86_64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
};
311
// Slow path for HLoadString: resolves the string through the kQuickResolveString
// runtime entrypoint and moves the result into the instruction's output location.
class LoadStringSlowPathX86_64 : public SlowPathCode {
 public:
  explicit LoadStringSlowPathX86_64(HLoadString* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
    // Custom calling convention: RAX serves as both input and output.
    __ movl(CpuRegister(RAX), Immediate(string_index.index_));
    x86_64_codegen->InvokeRuntime(kQuickResolveString,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
    RestoreLiveRegisters(codegen, locations);

    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "LoadStringSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
};
343
// Slow path for HInstanceOf / HCheckCast: calls kQuickInstanceofNonTrivial or
// kQuickCheckInstanceOf. May be fatal (for a CheckCast that is known to throw),
// in which case no registers are restored and no exit jump is emitted.
class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    if (kPoisonHeapReferences &&
        instruction_->IsCheckCast() &&
        instruction_->AsCheckCast()->GetTypeCheckKind() == TypeCheckKind::kInterfaceCheck) {
      // First, unpoison the `cls` reference that was poisoned for direct memory comparison.
      __ UnpoisonHeapReference(locations->InAt(1).AsRegister<CpuRegister>());
    }

    if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored either below or in the catch block if caught.
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kReference,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kReference);
    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        // The instanceof result comes back in RAX; move it to the output location.
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const override { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const override { return is_fatal_; }

 private:
  // True when the runtime call is guaranteed to throw (CheckCast that must fail).
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};
406
// Slow path for HDeoptimize: passes the deoptimization kind as the first runtime
// argument and calls kQuickDeoptimize, which transfers execution to the interpreter
// (no exit jump back to compiled code is emitted).
class DeoptimizationSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    LocationSummary* locations = instruction_->GetLocations();
    SaveLiveRegisters(codegen, locations);
    InvokeRuntimeCallingConvention calling_convention;
    // First argument: the DeoptimizationKind of this deopt, as a 32-bit value.
    x86_64_codegen->Load32BitValue(
        CpuRegister(calling_convention.GetRegisterAt(0)),
        static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
    x86_64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
  }

  const char* GetDescription() const override { return "DeoptimizationSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
};
430
// Slow path for HArraySet of object references: marshals (array, index, value)
// into the runtime calling convention with a parallel move (the three sources may
// overlap the destination registers) and calls kQuickAputObject.
class ArraySetSlowPathX86_64 : public SlowPathCode {
 public:
  explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    // Argument 0: the array reference.
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        DataType::Type::kReference,
        nullptr);
    // Argument 1: the index.
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32,
        nullptr);
    // Argument 2: the value to store.
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        DataType::Type::kReference,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "ArraySetSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
};
471
// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking (see
// ReadBarrierMarkAndUpdateFieldSlowPathX86_64 below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
 public:
  // `instruction` is the HIR instruction whose reference load is being
  // instrumented; `ref` is the (register) location holding that reference;
  // `unpoison_ref_before_marking` requests unpoisoning of the heap
  // reference before the mark entrypoint is invoked.
  ReadBarrierMarkSlowPathX86_64(HInstruction* instruction,
                                Location ref,
                                bool unpoison_ref_before_marking)
      : SlowPathCode(instruction),
        ref_(ref),
        unpoison_ref_before_marking_(unpoison_ref_before_marking) {
    // This slow path is only meaningful when compiling with read barriers.
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const override { return "ReadBarrierMarkSlowPathX86_64"; }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
    Register ref_reg = ref_cpu_reg.AsRegister();
    DCHECK(locations->CanCall());
    // `ref_reg` must not be among the saved live registers: the entrypoint
    // below returns the marked reference in that same register.
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // Sanity-check that the instruction is one of the kinds whose reference
    // loads are instrumented with this marking slow path.
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_cpu_reg);
    }
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(ref_reg, RSP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in R0):
    //
    //   RDI <- ref
    //   RAX <- ReadBarrierMark(RDI)
    //   ref <- RAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    // There is one mark entrypoint per core register, selected by `ref_reg`.
    int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
};
555
// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathX86_64 above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathX86_64 : public SlowPathCode {
 public:
  // `ref` is the (register) location of the reference to mark; `obj` holds
  // the object containing the field; `field_addr` is the address of that
  // field (its base must be `obj`); `temp1`/`temp2` are scratch registers
  // used for the CAS sequence below.
  ReadBarrierMarkAndUpdateFieldSlowPathX86_64(HInstruction* instruction,
                                              Location ref,
                                              CpuRegister obj,
                                              const Address& field_addr,
                                              bool unpoison_ref_before_marking,
                                              CpuRegister temp1,
                                              CpuRegister temp2)
      : SlowPathCode(instruction),
        ref_(ref),
        obj_(obj),
        field_addr_(field_addr),
        unpoison_ref_before_marking_(unpoison_ref_before_marking),
        temp1_(temp1),
        temp2_(temp2) {
    // This slow path is only meaningful when compiling with read barriers.
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const override {
    return "ReadBarrierMarkAndUpdateFieldSlowPathX86_64";
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
    Register ref_reg = ref_cpu_reg.AsRegister();
    DCHECK(locations->CanCall());
    // The mark entrypoint returns the marked reference in `ref_reg` itself,
    // so that register must not be among the saved live registers.
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_cpu_reg);
    }

    // Save the old (unpoisoned) reference.
    __ movl(temp1_, ref_cpu_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(ref_reg, RSP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in R0):
    //
    //   RDI <- ref
    //   RAX <- ReadBarrierMark(RDI)
    //   ref <- RAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*field_addr`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // LOCK CMPXCHGL instruction in the compare-and-set (CAS)
    // operation below would abort the CAS, leaving the field as-is.
    NearLabel done;
    __ cmpl(temp1_, ref_cpu_reg);
    __ j(kEqual, &done);

    // Update the holder's field atomically. This may fail if
    // mutator updates before us, but it's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.
    // This operation is implemented with a 32-bit LOCK CMPXCHG
    // instruction, which requires the expected value (the old
    // reference) to be in EAX. Save RAX beforehand, and move the
    // expected value (stored in `temp1_`) into EAX.
    __ movq(temp2_, CpuRegister(RAX));
    __ movl(CpuRegister(RAX), temp1_);

    // Convenience aliases.
    CpuRegister base = obj_;
    CpuRegister expected = CpuRegister(RAX);
    CpuRegister value = ref_cpu_reg;

    bool base_equals_value = (base.AsRegister() == value.AsRegister());
    Register value_reg = ref_reg;
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // If `base` and `value` are the same register location, move
        // `value_reg` to a temporary register. This way, poisoning
        // `value_reg` won't invalidate `base`.
        value_reg = temp1_.AsRegister();
        __ movl(CpuRegister(value_reg), base);
      }

      // Check that the register allocator did not assign the location
      // of `expected` (RAX) to `value` nor to `base`, so that heap
      // poisoning (when enabled) works as intended below.
      // - If `value` were equal to `expected`, both references would
      //   be poisoned twice, meaning they would not be poisoned at
      //   all, as heap poisoning uses address negation.
      // - If `base` were equal to `expected`, poisoning `expected`
      //   would invalidate `base`.
      DCHECK_NE(value_reg, expected.AsRegister());
      DCHECK_NE(base.AsRegister(), expected.AsRegister());

      __ PoisonHeapReference(expected);
      __ PoisonHeapReference(CpuRegister(value_reg));
    }

    __ LockCmpxchgl(field_addr_, CpuRegister(value_reg));

    // If heap poisoning is enabled, we need to unpoison the values
    // that were poisoned earlier.
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // `value_reg` has been moved to a temporary register, no need
        // to unpoison it.
      } else {
        __ UnpoisonHeapReference(CpuRegister(value_reg));
      }
      // No need to unpoison `expected` (RAX), as it is overwritten below.
    }

    // Restore RAX.
    __ movq(CpuRegister(RAX), temp2_);

    __ Bind(&done);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const CpuRegister obj_;
  // The address of the marked reference field. The base of this address must be `obj_`.
  const Address field_addr_;

  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  // Scratch registers: `temp1_` holds the pre-marking reference (the CAS
  // expected value); `temp2_` preserves RAX across the CAS sequence.
  const CpuRegister temp1_;
  const CpuRegister temp2_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathX86_64);
};
727
// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
 public:
  // `out` receives the barrier result; `ref` is the reference that was
  // loaded; `obj` is the holder object; `offset`/`index` describe where in
  // `obj` the reference lives (`index` is valid for array/intrinsic cases).
  ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCode(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial
    // object has been overwritten by (or after) the heap object
    // reference load to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister reg_out = out_.AsRegister<CpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
    // Sanity-check that the instruction is one of the kinds whose heap
    // reference loads are instrumented with this slow path.
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvoke() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute real offset and store it in index_.
        Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86_64::X86_64Assembler::shll and
          // art::x86_64::X86_64Assembler::AddImmediate below), but it
          // has not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
          __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the
        // scale factor (2) cannot overflow in practice, as the
        // runtime is unable to allocate object arrays with a size
        // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          DataType::Type::kReference,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          DataType::Type::kReference,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            DataType::Type::kInt32,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      // No index: pass the constant field offset as the third argument.
      __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
    }
    x86_64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    // The entrypoint returns the to-space reference in RAX; move it to `out_`.
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override {
    return "ReadBarrierForHeapReferenceSlowPathX86_64";
  }

 private:
  // Returns a caller-save core register distinct from `ref_` and `obj_`,
  // used to preserve a callee-save index register before clobbering it.
  CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
    size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<CpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86-64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
};
909
// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 public:
  // `out` receives the barrier result; `root` is the GC root location
  // whose value is passed to the runtime.
  ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    // This slow path is only meaningful when compiling with read barriers.
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    // GC roots reach this slow path only through class and string loads.
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Pass the root in the first runtime-call argument register.
    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    // The entrypoint returns the result in RAX; move it to `out_`.
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "ReadBarrierForRootSlowPathX86_64"; }

 private:
  // The destination of the barrier result.
  const Location out_;
  // The GC root to pass to the runtime.
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
};
951
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100952#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100953// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
954#define __ down_cast<X86_64Assembler*>(GetAssembler())-> // NOLINT
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100955
Roland Levillain4fa13f62015-07-06 18:11:54 +0100956inline Condition X86_64IntegerCondition(IfCondition cond) {
Dave Allison20dfc792014-06-16 20:44:29 -0700957 switch (cond) {
958 case kCondEQ: return kEqual;
959 case kCondNE: return kNotEqual;
960 case kCondLT: return kLess;
961 case kCondLE: return kLessEqual;
962 case kCondGT: return kGreater;
963 case kCondGE: return kGreaterEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700964 case kCondB: return kBelow;
965 case kCondBE: return kBelowEqual;
966 case kCondA: return kAbove;
967 case kCondAE: return kAboveEqual;
Dave Allison20dfc792014-06-16 20:44:29 -0700968 }
Roland Levillain4fa13f62015-07-06 18:11:54 +0100969 LOG(FATAL) << "Unreachable";
970 UNREACHABLE();
971}
972
Aart Bike9f37602015-10-09 11:15:55 -0700973// Maps FP condition to x86_64 name.
Roland Levillain4fa13f62015-07-06 18:11:54 +0100974inline Condition X86_64FPCondition(IfCondition cond) {
975 switch (cond) {
976 case kCondEQ: return kEqual;
977 case kCondNE: return kNotEqual;
978 case kCondLT: return kBelow;
979 case kCondLE: return kBelowEqual;
980 case kCondGT: return kAbove;
981 case kCondGE: return kAboveEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700982 default: break; // should not happen
Igor Murashkin2ffb7032017-11-08 13:35:21 -0800983 }
Roland Levillain4fa13f62015-07-06 18:11:54 +0100984 LOG(FATAL) << "Unreachable";
985 UNREACHABLE();
Dave Allison20dfc792014-06-16 20:44:29 -0700986}
987
Vladimir Marko86c87522020-05-11 16:55:55 +0100988void CodeGeneratorX86_64::BlockNonVolatileXmmRegisters(LocationSummary* locations) {
989 // We have to ensure that the native code we call directly (such as @CriticalNative
990 // or some intrinsic helpers, say Math.sin()) doesn't clobber the XMM registers
991 // which are non-volatile for ART, but volatile for Native calls. This will ensure
992 // that they are saved in the prologue and properly restored.
993 for (FloatRegister fp_reg : non_volatile_xmm_regs) {
994 locations->AddTemp(Location::FpuRegisterLocation(fp_reg));
995 }
996}
997
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    ArtMethod* method ATTRIBUTE_UNUSED) {
  // On x86-64 every requested dispatch kind is supported as-is, so the
  // desired dispatch info is returned unchanged.
  return desired_dispatch_info;
}
1003
// Emits the code that materializes an ArtMethod* into `temp` for the given
// method load kind, recording a linker patch where the final address or
// offset is not known until link/JIT time.
void CodeGeneratorX86_64::LoadMethod(MethodLoadKind load_kind, Location temp, HInvoke* invoke) {
  switch (load_kind) {
    case MethodLoadKind::kBootImageLinkTimePcRelative:
      // PC-relative lea to a boot-image method; the 32-bit displacement is
      // a placeholder fixed up via the recorded patch.
      DCHECK(GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension());
      __ leal(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kPlaceholder32BitOffset, /* no_rip= */ false));
      RecordBootImageMethodPatch(invoke);
      break;
    case MethodLoadKind::kBootImageRelRo: {
      // Note: Boot image is in the low 4GiB and the entry is 32-bit, so emit a 32-bit load.
      __ movl(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kPlaceholder32BitOffset, /* no_rip= */ false));
      RecordBootImageRelRoPatch(GetBootImageOffset(invoke));
      break;
    }
    case MethodLoadKind::kBssEntry: {
      // 64-bit load from the method's .bss entry, patched at link time.
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kPlaceholder32BitOffset, /* no_rip= */ false));
      RecordMethodBssEntryPatch(invoke);
      // No need for memory fence, thanks to the x86-64 memory model.
      break;
    }
    case MethodLoadKind::kJitDirectAddress: {
      // JIT compilation: the resolved ArtMethod* is known, load it directly.
      Load64BitValue(temp.AsRegister<CpuRegister>(),
                     reinterpret_cast<int64_t>(invoke->GetResolvedMethod()));
      break;
    }
    case MethodLoadKind::kRuntimeCall: {
      // Test situation, don't do anything.
      break;
    }
    default: {
      LOG(FATAL) << "Load kind should have already been handled " << load_kind;
      UNREACHABLE();
    }
  }
}
1041
1042void CodeGeneratorX86_64::GenerateStaticOrDirectCall(
1043 HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
1044 // All registers are assumed to be correctly set up.
1045
1046 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
1047 switch (invoke->GetMethodLoadKind()) {
1048 case MethodLoadKind::kStringInit: {
1049 // temp = thread->string_init_entrypoint
1050 uint32_t offset =
1051 GetThreadOffset<kX86_64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
1052 __ gs()->movq(temp.AsRegister<CpuRegister>(), Address::Absolute(offset, /* no_rip= */ true));
1053 break;
1054 }
1055 case MethodLoadKind::kRecursive: {
1056 callee_method = invoke->GetLocations()->InAt(invoke->GetCurrentMethodIndex());
1057 break;
1058 }
Nicolas Geoffray6d69b522020-09-23 14:47:28 +01001059 case MethodLoadKind::kRuntimeCall: {
Vladimir Markoe7197bf2017-06-02 17:00:23 +01001060 GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
1061 return; // No code pointer retrieval; the runtime performs the call directly.
Vladimir Marko9b688a02015-05-06 14:12:42 +01001062 }
Vladimir Markoeb9eb002020-10-02 13:54:19 +01001063 case MethodLoadKind::kBootImageLinkTimePcRelative:
1064 // For kCallCriticalNative we skip loading the method and do the call directly.
1065 if (invoke->GetCodePtrLocation() == CodePtrLocation::kCallCriticalNative) {
1066 break;
1067 }
1068 FALLTHROUGH_INTENDED;
Nicolas Geoffray8d34a182020-09-16 09:46:58 +01001069 default: {
1070 LoadMethod(invoke->GetMethodLoadKind(), temp, invoke);
1071 break;
1072 }
Vladimir Marko58155012015-08-19 12:49:41 +00001073 }
1074
1075 switch (invoke->GetCodePtrLocation()) {
Nicolas Geoffray6d69b522020-09-23 14:47:28 +01001076 case CodePtrLocation::kCallSelf:
Vladimir Marko58155012015-08-19 12:49:41 +00001077 __ call(&frame_entry_label_);
Vladimir Marko86c87522020-05-11 16:55:55 +01001078 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Vladimir Marko58155012015-08-19 12:49:41 +00001079 break;
Nicolas Geoffray6d69b522020-09-23 14:47:28 +01001080 case CodePtrLocation::kCallCriticalNative: {
Vladimir Marko86c87522020-05-11 16:55:55 +01001081 size_t out_frame_size =
1082 PrepareCriticalNativeCall<CriticalNativeCallingConventionVisitorX86_64,
1083 kNativeStackAlignment,
Vladimir Markodec78172020-06-19 15:31:23 +01001084 GetCriticalNativeDirectCallFrameSize>(invoke);
Vladimir Markoeb9eb002020-10-02 13:54:19 +01001085 if (invoke->GetMethodLoadKind() == MethodLoadKind::kBootImageLinkTimePcRelative) {
1086 DCHECK(GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension());
1087 __ call(Address::Absolute(kPlaceholder32BitOffset, /* no_rip= */ false));
1088 RecordBootImageJniEntrypointPatch(invoke);
1089 } else {
1090 // (callee_method + offset_of_jni_entry_point)()
1091 __ call(Address(callee_method.AsRegister<CpuRegister>(),
1092 ArtMethod::EntryPointFromJniOffset(kX86_64PointerSize).SizeValue()));
1093 }
Vladimir Marko86c87522020-05-11 16:55:55 +01001094 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
1095 // Zero-/sign-extend the result when needed due to native and managed ABI mismatch.
1096 switch (invoke->GetType()) {
1097 case DataType::Type::kBool:
1098 __ movzxb(CpuRegister(RAX), CpuRegister(RAX));
1099 break;
1100 case DataType::Type::kInt8:
1101 __ movsxb(CpuRegister(RAX), CpuRegister(RAX));
1102 break;
1103 case DataType::Type::kUint16:
1104 __ movzxw(CpuRegister(RAX), CpuRegister(RAX));
1105 break;
1106 case DataType::Type::kInt16:
1107 __ movsxw(CpuRegister(RAX), CpuRegister(RAX));
1108 break;
1109 case DataType::Type::kInt32:
1110 case DataType::Type::kInt64:
1111 case DataType::Type::kFloat32:
1112 case DataType::Type::kFloat64:
1113 case DataType::Type::kVoid:
1114 break;
1115 default:
1116 DCHECK(false) << invoke->GetType();
1117 break;
1118 }
1119 if (out_frame_size != 0u) {
Vladimir Markodec78172020-06-19 15:31:23 +01001120 DecreaseFrame(out_frame_size);
Vladimir Marko86c87522020-05-11 16:55:55 +01001121 }
1122 break;
1123 }
Nicolas Geoffray6d69b522020-09-23 14:47:28 +01001124 case CodePtrLocation::kCallArtMethod:
Vladimir Marko58155012015-08-19 12:49:41 +00001125 // (callee_method + offset_of_quick_compiled_code)()
1126 __ call(Address(callee_method.AsRegister<CpuRegister>(),
1127 ArtMethod::EntryPointFromQuickCompiledCodeOffset(
Andreas Gampe542451c2016-07-26 09:02:02 -07001128 kX86_64PointerSize).SizeValue()));
Vladimir Marko86c87522020-05-11 16:55:55 +01001129 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Vladimir Marko58155012015-08-19 12:49:41 +00001130 break;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001131 }
Andreas Gampe71fb52f2014-12-29 17:43:08 -08001132
1133 DCHECK(!IsLeafMethod());
Andreas Gampe71fb52f2014-12-29 17:43:08 -08001134}
1135
// Emits a virtual call: loads the receiver's class, fetches the target
// ArtMethod from the class's embedded vtable at the invoke's vtable index,
// and calls its quick-compiled entry point. Records a stack map for the call.
void CodeGeneratorX86_64::GenerateVirtualCall(
    HInvokeVirtual* invoke, Location temp_in, SlowPathCode* slow_path) {
  CpuRegister temp = temp_in.AsRegister<CpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // /* HeapReference<Class> */ temp = receiver->klass_
  // Note: the class load doubles as the implicit null check of the receiver,
  // hence MaybeRecordImplicitNullCheck immediately after the movl.
  __ movl(temp, Address(CpuRegister(receiver), class_offset));
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);

  // May emit an inline-cache update for the receiver class in `temp`
  // (see MaybeGenerateInlineCacheCheck for the conditions).
  MaybeGenerateInlineCacheCheck(invoke, temp);

  // temp = temp->GetMethodAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86_64PointerSize).SizeValue()));
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
}
1171
Vladimir Marko6fd16062018-06-26 11:02:04 +01001172void CodeGeneratorX86_64::RecordBootImageIntrinsicPatch(uint32_t intrinsic_data) {
Vladimir Marko2d06e022019-07-08 15:45:19 +01001173 boot_image_other_patches_.emplace_back(/* target_dex_file= */ nullptr, intrinsic_data);
1174 __ Bind(&boot_image_other_patches_.back().label);
Vladimir Marko6fd16062018-06-26 11:02:04 +01001175}
1176
Vladimir Markob066d432018-01-03 13:14:37 +00001177void CodeGeneratorX86_64::RecordBootImageRelRoPatch(uint32_t boot_image_offset) {
Vladimir Marko2d06e022019-07-08 15:45:19 +01001178 boot_image_other_patches_.emplace_back(/* target_dex_file= */ nullptr, boot_image_offset);
1179 __ Bind(&boot_image_other_patches_.back().label);
Vladimir Markob066d432018-01-03 13:14:37 +00001180}
1181
Nicolas Geoffray8d34a182020-09-16 09:46:58 +01001182void CodeGeneratorX86_64::RecordBootImageMethodPatch(HInvoke* invoke) {
Nicolas Geoffraye6c0f2a2020-09-07 08:30:52 +01001183 boot_image_method_patches_.emplace_back(invoke->GetResolvedMethodReference().dex_file,
1184 invoke->GetResolvedMethodReference().index);
Vladimir Marko65979462017-05-19 17:25:12 +01001185 __ Bind(&boot_image_method_patches_.back().label);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001186}
1187
Nicolas Geoffray8d34a182020-09-16 09:46:58 +01001188void CodeGeneratorX86_64::RecordMethodBssEntryPatch(HInvoke* invoke) {
Nicolas Geoffraye6c0f2a2020-09-07 08:30:52 +01001189 DCHECK(IsSameDexFile(GetGraph()->GetDexFile(), *invoke->GetMethodReference().dex_file));
1190 method_bss_entry_patches_.emplace_back(invoke->GetMethodReference().dex_file,
1191 invoke->GetMethodReference().index);
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001192 __ Bind(&method_bss_entry_patches_.back().label);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001193}
1194
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001195void CodeGeneratorX86_64::RecordBootImageTypePatch(HLoadClass* load_class) {
1196 boot_image_type_patches_.emplace_back(
1197 &load_class->GetDexFile(), load_class->GetTypeIndex().index_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001198 __ Bind(&boot_image_type_patches_.back().label);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001199}
1200
Vladimir Marko6bec91c2017-01-09 15:03:12 +00001201Label* CodeGeneratorX86_64::NewTypeBssEntryPatch(HLoadClass* load_class) {
Vladimir Marko8f63f102020-09-28 12:10:28 +01001202 ArenaDeque<PatchInfo<Label>>* patches = nullptr;
1203 switch (load_class->GetLoadKind()) {
1204 case HLoadClass::LoadKind::kBssEntry:
1205 patches = &type_bss_entry_patches_;
1206 break;
1207 case HLoadClass::LoadKind::kBssEntryPublic:
1208 patches = &public_type_bss_entry_patches_;
1209 break;
1210 case HLoadClass::LoadKind::kBssEntryPackage:
1211 patches = &package_type_bss_entry_patches_;
1212 break;
1213 default:
1214 LOG(FATAL) << "Unexpected load kind: " << load_class->GetLoadKind();
1215 UNREACHABLE();
1216 }
1217 patches->emplace_back(&load_class->GetDexFile(), load_class->GetTypeIndex().index_);
1218 return &patches->back().label;
Vladimir Marko6bec91c2017-01-09 15:03:12 +00001219}
1220
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001221void CodeGeneratorX86_64::RecordBootImageStringPatch(HLoadString* load_string) {
1222 boot_image_string_patches_.emplace_back(
1223 &load_string->GetDexFile(), load_string->GetStringIndex().index_);
1224 __ Bind(&boot_image_string_patches_.back().label);
Vladimir Marko65979462017-05-19 17:25:12 +01001225}
1226
Vladimir Markoaad75c62016-10-03 08:46:48 +00001227Label* CodeGeneratorX86_64::NewStringBssEntryPatch(HLoadString* load_string) {
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001228 string_bss_entry_patches_.emplace_back(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001229 &load_string->GetDexFile(), load_string->GetStringIndex().index_);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001230 return &string_bss_entry_patches_.back().label;
Vladimir Markoaad75c62016-10-03 08:46:48 +00001231}
1232
Vladimir Markoeb9eb002020-10-02 13:54:19 +01001233void CodeGeneratorX86_64::RecordBootImageJniEntrypointPatch(HInvokeStaticOrDirect* invoke) {
1234 boot_image_jni_entrypoint_patches_.emplace_back(invoke->GetResolvedMethodReference().dex_file,
1235 invoke->GetResolvedMethodReference().index);
1236 __ Bind(&boot_image_jni_entrypoint_patches_.back().label);
1237}
1238
// Loads the (32-bit) address of a boot image object into `reg`.
// Three emission strategies depending on the compilation mode:
//  - boot image compilation: link-time patched PC-relative LEA;
//  - PIC AOT: load from the boot image relocation table (patched RelRo entry);
//  - JIT: the boot image is already mapped, so emit the absolute address.
void CodeGeneratorX86_64::LoadBootImageAddress(CpuRegister reg, uint32_t boot_image_reference) {
  if (GetCompilerOptions().IsBootImage()) {
    // Placeholder displacement is fixed up by the linker (see
    // RecordBootImageIntrinsicPatch / EmitLinkerPatches).
    __ leal(reg,
            Address::Absolute(CodeGeneratorX86_64::kPlaceholder32BitOffset, /* no_rip= */ false));
    RecordBootImageIntrinsicPatch(boot_image_reference);
  } else if (GetCompilerOptions().GetCompilePic()) {
    // Load the address from a .data.bimg.rel.ro entry patched at load time.
    __ movl(reg,
            Address::Absolute(CodeGeneratorX86_64::kPlaceholder32BitOffset, /* no_rip= */ false));
    RecordBootImageRelRoPatch(boot_image_reference);
  } else {
    DCHECK(GetCompilerOptions().IsJitCompiler());
    gc::Heap* heap = Runtime::Current()->GetHeap();
    DCHECK(!heap->GetBootImageSpaces().empty());
    // JIT: compute the concrete address now; it must fit in 32 bits
    // (enforced by dchecked_integral_cast).
    const uint8_t* address = heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference;
    __ movl(reg, Immediate(dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(address))));
  }
}
1256
// Loads the declaring class of the intrinsic `invoke` into `reg`.
// For boot image compilation this emits a link-time patched PC-relative LEA;
// otherwise the class is loaded via its boot image offset.
void CodeGeneratorX86_64::LoadIntrinsicDeclaringClass(CpuRegister reg, HInvoke* invoke) {
  DCHECK_NE(invoke->GetIntrinsic(), Intrinsics::kNone);
  if (GetCompilerOptions().IsBootImage()) {
    // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
    __ leal(reg,
            Address::Absolute(CodeGeneratorX86_64::kPlaceholder32BitOffset, /* no_rip= */ false));
    // Resolve the intrinsic's declaring class from its method id and record a
    // boot-image type patch at the LEA just emitted.
    MethodReference target_method = invoke->GetResolvedMethodReference();
    dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
    boot_image_type_patches_.emplace_back(target_method.dex_file, type_idx.index_);
    __ Bind(&boot_image_type_patches_.back().label);
  } else {
    uint32_t boot_image_offset = GetBootImageOffsetOfIntrinsicDeclaringClass(invoke);
    LoadBootImageAddress(reg, boot_image_offset);
  }
}
1272
Vladimir Markoaad75c62016-10-03 08:46:48 +00001273// The label points to the end of the "movl" or another instruction but the literal offset
1274// for method patch needs to point to the embedded constant which occupies the last 4 bytes.
1275constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
1276
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01001277template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
Vladimir Markoaad75c62016-10-03 08:46:48 +00001278inline void CodeGeneratorX86_64::EmitPcRelativeLinkerPatches(
1279 const ArenaDeque<PatchInfo<Label>>& infos,
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01001280 ArenaVector<linker::LinkerPatch>* linker_patches) {
Vladimir Markoaad75c62016-10-03 08:46:48 +00001281 for (const PatchInfo<Label>& info : infos) {
1282 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
1283 linker_patches->push_back(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001284 Factory(literal_offset, info.target_dex_file, info.label.Position(), info.offset_or_index));
Vladimir Markoaad75c62016-10-03 08:46:48 +00001285 }
1286}
1287
Vladimir Marko6fd16062018-06-26 11:02:04 +01001288template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
1289linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
1290 const DexFile* target_dex_file,
1291 uint32_t pc_insn_offset,
1292 uint32_t boot_image_offset) {
1293 DCHECK(target_dex_file == nullptr); // Unused for these patches, should be null.
1294 return Factory(literal_offset, pc_insn_offset, boot_image_offset);
Vladimir Markob066d432018-01-03 13:14:37 +00001295}
1296
// Collects all linker patches recorded during code generation into
// `linker_patches`. The total is pre-computed for a single reserve() and
// re-checked at the end with DCHECK_EQ.
void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      boot_image_method_patches_.size() +
      method_bss_entry_patches_.size() +
      boot_image_type_patches_.size() +
      type_bss_entry_patches_.size() +
      public_type_bss_entry_patches_.size() +
      package_type_bss_entry_patches_.size() +
      boot_image_string_patches_.size() +
      string_bss_entry_patches_.size() +
      boot_image_jni_entrypoint_patches_.size() +
      boot_image_other_patches_.size();
  linker_patches->reserve(size);
  // Direct relative method/type/string patches are only emitted when
  // compiling the boot image (or an extension); other modes must not have
  // recorded any.
  if (GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension()) {
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
        boot_image_method_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
        boot_image_type_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
        boot_image_string_patches_, linker_patches);
  } else {
    DCHECK(boot_image_method_patches_.empty());
    DCHECK(boot_image_type_patches_.empty());
    DCHECK(boot_image_string_patches_.empty());
  }
  // The "other" patches are intrinsic references when compiling the boot
  // image itself, and .data.bimg.rel.ro entries otherwise. Both factories
  // take no dex file, hence the NoDexFileAdapter.
  if (GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
        boot_image_other_patches_, linker_patches);
  } else {
    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
        boot_image_other_patches_, linker_patches);
  }
  // .bss entry and JNI entrypoint patches are emitted in all modes.
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
      method_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
      type_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::PublicTypeBssEntryPatch>(
      public_type_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::PackageTypeBssEntryPatch>(
      package_type_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
      string_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeJniEntrypointPatch>(
      boot_image_jni_entrypoint_patches_, linker_patches);
  DCHECK_EQ(size, linker_patches->size());
}
1344
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001345void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001346 stream << Register(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001347}
1348
1349void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001350 stream << FloatRegister(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001351}
1352
Vladimir Markoa0431112018-06-25 09:32:54 +01001353const X86_64InstructionSetFeatures& CodeGeneratorX86_64::GetInstructionSetFeatures() const {
1354 return *GetCompilerOptions().GetInstructionSetFeatures()->AsX86_64InstructionSetFeatures();
1355}
1356
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001357size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1358 __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
1359 return kX86_64WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001360}
1361
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001362size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1363 __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
1364 return kX86_64WordSize;
1365}
1366
1367size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Aart Bikb13c65b2017-03-21 20:14:07 -07001368 if (GetGraph()->HasSIMD()) {
Aart Bik5576f372017-03-23 16:17:37 -07001369 __ movups(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
Aart Bikb13c65b2017-03-21 20:14:07 -07001370 } else {
1371 __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
1372 }
Artem Serov6a0b6572019-07-26 20:38:37 +01001373 return GetSlowPathFPWidth();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001374}
1375
1376size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Aart Bikb13c65b2017-03-21 20:14:07 -07001377 if (GetGraph()->HasSIMD()) {
Aart Bik5576f372017-03-23 16:17:37 -07001378 __ movups(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
Aart Bikb13c65b2017-03-21 20:14:07 -07001379 } else {
1380 __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
1381 }
Artem Serov6a0b6572019-07-26 20:38:37 +01001382 return GetSlowPathFPWidth();
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001383}
1384
Calin Juravle175dc732015-08-25 15:42:32 +01001385void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
1386 HInstruction* instruction,
1387 uint32_t dex_pc,
1388 SlowPathCode* slow_path) {
Alexandre Rames91a65162016-09-19 13:54:30 +01001389 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
Serban Constantinescuba45db02016-07-12 22:53:02 +01001390 GenerateInvokeRuntime(GetThreadOffset<kX86_64PointerSize>(entrypoint).Int32Value());
1391 if (EntrypointRequiresStackMap(entrypoint)) {
1392 RecordPcInfo(instruction, dex_pc, slow_path);
1393 }
Alexandre Rames8158f282015-08-07 10:26:17 +01001394}
1395
Roland Levillaindec8f632016-07-22 17:10:06 +01001396void CodeGeneratorX86_64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
1397 HInstruction* instruction,
1398 SlowPathCode* slow_path) {
1399 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
Serban Constantinescuba45db02016-07-12 22:53:02 +01001400 GenerateInvokeRuntime(entry_point_offset);
1401}
1402
1403void CodeGeneratorX86_64::GenerateInvokeRuntime(int32_t entry_point_offset) {
Andreas Gampe3db70682018-12-26 15:12:03 -08001404 __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip= */ true));
Roland Levillaindec8f632016-07-22 17:10:06 +01001405}
1406
// x86-64 allocates 64-bit values in single registers, so no register pairs.
static constexpr int kNumberOfCpuRegisterPairs = 0;
// Use a fake return address register to mimic Quick.
static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
// Constructs the x86-64 code generator. The core callee-save mask passed to
// the base class additionally includes the fake return address register so
// the return address is tracked like a callee save. All patch/fixup
// containers are arena-allocated from the graph's allocator.
CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCpuRegisters,
                    kNumberOfFloatRegisters,
                    kNumberOfCpuRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves))
                        | (1 << kFakeReturnRegister),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetAllocator(), this),
      assembler_(graph->GetAllocator()),
      constant_area_start_(0),
      boot_image_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      public_type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      package_type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_jni_entrypoint_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_other_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      fixups_to_jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
  // Mark the fake return address register as allocated so the register
  // allocator never hands it out.
  AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001445
// Constructs the instruction visitor that emits code for each HInstruction,
// sharing the assembler owned by `codegen`.
InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
                                                               CodeGeneratorX86_64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}
1451
// Marks registers that the register allocator must never hand out.
void CodeGeneratorX86_64::SetupBlockedRegisters() const {
  // Stack register is always reserved.
  blocked_core_registers_[RSP] = true;

  // Block the register used as TMP (the code generator's scratch register).
  blocked_core_registers_[TMP] = true;
}
1459
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001460static dwarf::Reg DWARFReg(Register reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001461 return dwarf::Reg::X86_64Core(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001462}
David Srbecky9d8606d2015-04-12 09:35:32 +01001463
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001464static dwarf::Reg DWARFReg(FloatRegister reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001465 return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001466}
1467
// Emits the method hotness bookkeeping. Two independent mechanisms:
//  - if the compiler options ask for hotness counting in compiled code,
//    saturating-increment ArtMethod's hotness counter;
//  - if baseline-compiling under the JIT, update the ProfilingInfo baseline
//    counter and call the compile-optimized stub when it wraps to zero.
void CodeGeneratorX86_64::MaybeIncrementHotness(bool is_frame_entry) {
  if (GetCompilerOptions().CountHotnessInCompiledCode()) {
    NearLabel overflow;
    Register method = kMethodRegisterArgument;
    if (!is_frame_entry) {
      // Not at the frame entry, so the method is no longer in RDI; reload it
      // from the frame (which must therefore exist).
      CHECK(RequiresCurrentMethod());
      method = TMP;
      __ movq(CpuRegister(method), Address(CpuRegister(RSP), kCurrentMethodStackOffset));
    }
    // Saturate: skip the increment once the counter reaches MaxCounter().
    __ cmpw(Address(CpuRegister(method), ArtMethod::HotnessCountOffset().Int32Value()),
            Immediate(ArtMethod::MaxCounter()));
    __ j(kEqual, &overflow);
    __ addw(Address(CpuRegister(method), ArtMethod::HotnessCountOffset().Int32Value()),
            Immediate(1));
    __ Bind(&overflow);
  }

  if (GetGraph()->IsCompilingBaseline() && !Runtime::Current()->IsAotCompiler()) {
    ScopedProfilingInfoUse spiu(
        Runtime::Current()->GetJit(), GetGraph()->GetArtMethod(), Thread::Current());
    ProfilingInfo* info = spiu.GetProfilingInfo();
    if (info != nullptr) {
      uint64_t address = reinterpret_cast64<uint64_t>(info);
      NearLabel done;
      // Increment the baseline hotness counter and mask it with the tiered
      // hotness mask; when the masked value hits zero, fall through to the
      // compile-optimized stub, otherwise jump past it.
      __ movq(CpuRegister(TMP), Immediate(address));
      __ addw(Address(CpuRegister(TMP), ProfilingInfo::BaselineHotnessCountOffset().Int32Value()),
              Immediate(1));
      __ andw(Address(CpuRegister(TMP), ProfilingInfo::BaselineHotnessCountOffset().Int32Value()),
              Immediate(interpreter::kTieredHotnessMask));
      __ j(kNotZero, &done);
      if (HasEmptyFrame()) {
        CHECK(is_frame_entry);
        // Frame alignment, and the stub expects the method on the stack.
        __ pushq(CpuRegister(RDI));
        __ cfi().AdjustCFAOffset(kX86_64WordSize);
        __ cfi().RelOffset(DWARFReg(RDI), 0);
      } else if (!RequiresCurrentMethod()) {
        CHECK(is_frame_entry);
        // Make sure the method is stored in its frame slot for the stub.
        __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset), CpuRegister(RDI));
      }
      GenerateInvokeRuntime(
          GetThreadOffset<kX86_64PointerSize>(kQuickCompileOptimized).Int32Value());
      if (HasEmptyFrame()) {
        // Undo the alignment push above.
        __ popq(CpuRegister(RDI));
        __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
        __ cfi().Restore(DWARFReg(RDI));
      }
      __ Bind(&done);
    }
  }
}
1519
// Emits the method prologue: stack overflow probe, callee-save spills, frame
// allocation, current-method store, deopt-flag init, and hotness counting.
// CFI directives are kept in lockstep with each stack-mutating instruction.
void CodeGeneratorX86_64::GenerateFrameEntry() {
  __ cfi().SetCurrentCFAOffset(kX86_64WordSize);  // return address
  __ Bind(&frame_entry_label_);
  // Leaf methods with small frames don't need an explicit overflow probe.
  bool skip_overflow_check = IsLeafMethod()
      && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
  // Only the implicit (fault-based) stack overflow check scheme is supported.
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());


  if (!skip_overflow_check) {
    size_t reserved_bytes = GetStackOverflowReservedBytes(InstructionSet::kX86_64);
    // Probe below the stack pointer; faults here are turned into
    // StackOverflowError by the runtime (stack map recorded below).
    __ testq(CpuRegister(RAX), Address(CpuRegister(RSP), -static_cast<int32_t>(reserved_bytes)));
    RecordPcInfo(nullptr, 0);
  }

  if (!HasEmptyFrame()) {
    // Push allocated core callee-saves in reverse order.
    for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
      Register reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        __ pushq(CpuRegister(reg));
        __ cfi().AdjustCFAOffset(kX86_64WordSize);
        __ cfi().RelOffset(DWARFReg(reg), 0);
      }
    }

    // Allocate the rest of the frame (beyond what the pushes covered).
    int adjust = GetFrameSize() - GetCoreSpillSize();
    IncreaseFrame(adjust);
    uint32_t xmm_spill_location = GetFpuSpillStart();
    size_t xmm_spill_slot_size = GetCalleePreservedFPWidth();

    // Spill allocated FP callee-saves into their frame slots.
    for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
      if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
        int offset = xmm_spill_location + (xmm_spill_slot_size * i);
        __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
        __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
      }
    }

    // Save the current method if we need it. Note that we do not
    // do this in HCurrentMethod, as the instruction might have been removed
    // in the SSA graph.
    if (RequiresCurrentMethod()) {
      CHECK(!HasEmptyFrame());
      __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
              CpuRegister(kMethodRegisterArgument));
    }

    if (GetGraph()->HasShouldDeoptimizeFlag()) {
      CHECK(!HasEmptyFrame());
      // Initialize should_deoptimize flag to 0.
      __ movl(Address(CpuRegister(RSP), GetStackOffsetOfShouldDeoptimizeFlag()), Immediate(0));
    }
  }

  MaybeIncrementHotness(/* is_frame_entry= */ true);
}
1575
// Emits the method epilogue: restores FP and core callee-saves, releases the
// frame, and returns. CFI state is saved/restored around the epilogue so the
// unwind info stays correct for code emitted after the `ret`.
void CodeGeneratorX86_64::GenerateFrameExit() {
  __ cfi().RememberState();
  if (!HasEmptyFrame()) {
    // Reload FP callee-saves from their frame slots (mirrors the prologue).
    uint32_t xmm_spill_location = GetFpuSpillStart();
    size_t xmm_spill_slot_size = GetCalleePreservedFPWidth();
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
        int offset = xmm_spill_location + (xmm_spill_slot_size * i);
        __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
        __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
      }
    }

    // Release the part of the frame allocated beyond the core-register pushes.
    int adjust = GetFrameSize() - GetCoreSpillSize();
    DecreaseFrame(adjust);

    // Pop core callee-saves in the reverse order of the prologue's pushes.
    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
      Register reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        __ popq(CpuRegister(reg));
        __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
        __ cfi().Restore(DWARFReg(reg));
      }
    }
  }
  __ ret();
  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1605
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001606void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
1607 __ Bind(GetLabelOf(block));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001608}
1609
// Moves a value between two locations (core register, FP register, stack slot
// or constant). The operand width (32 vs 64 bit) is chosen from the location
// and constant kinds; TMP is used as scratch for stack-to-stack moves.
void CodeGeneratorX86_64::Move(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    CpuRegister dest = destination.AsRegister<CpuRegister>();
    if (source.IsRegister()) {
      // Full-width register copy is always safe for core registers.
      __ movq(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movd(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsStackSlot()) {
      __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      if (constant->IsLongConstant()) {
        Load64BitValue(dest, constant->AsLongConstant()->GetValue());
      } else {
        Load32BitValue(dest, GetInt32ValueOf(constant));
      }
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsFpuRegister()) {
    XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
    if (source.IsRegister()) {
      __ movd(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movaps(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      // Load the raw bit pattern of the FP constant through the 32/64-bit
      // integer loader.
      HConstant* constant = source.GetConstant();
      int64_t value = CodeGenerator::GetInt64ValueOf(constant);
      if (constant->IsFloatConstant()) {
        Load32BitValue(dest, static_cast<int32_t>(value));
      } else {
        Load64BitValue(dest, value);
      }
    } else if (source.IsStackSlot()) {
      __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsStackSlot()) {
    if (source.IsRegister()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      int32_t value = GetInt32ValueOf(constant);
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
    } else {
      // Stack-to-stack move goes through the scratch register TMP.
      DCHECK(source.IsStackSlot()) << source;
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegister()) {
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      DCHECK(constant->IsLongConstant() || constant->IsDoubleConstant());
      int64_t value = GetInt64ValueOf(constant);
      Store64BitValueToStack(destination, value);
    } else {
      // 64-bit stack-to-stack move, again via TMP.
      DCHECK(source.IsDoubleStackSlot());
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  }
}
1689
// Materializes the 32-bit constant `value` (sign-extended to 64 bits) in the
// register described by `location`.
void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
  DCHECK(location.IsRegister());
  Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
}
1694
// Type-agnostic move. On x86-64, Move() already selects the right instruction
// from the location kinds alone, so the destination type hint is unused.
void CodeGeneratorX86_64::MoveLocation(
    Location dst, Location src, DataType::Type dst_type ATTRIBUTE_UNUSED) {
  Move(dst, src);
}
1699
1700void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1701 if (location.IsRegister()) {
1702 locations->AddTemp(location);
1703 } else {
1704 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1705 }
1706}
1707
// Emits the code for an unconditional control-flow edge (HGoto/HTryBoundary):
// possibly a suspend check and hotness update on back edges, then a jump
// unless the successor is the fallthrough block.
void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  if (successor->IsExitBlock()) {
    DCHECK(got->GetPrevious()->AlwaysThrows());
    return;  // no code needed
  }

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    // Loop back edge: bump the hotness counter and emit the suspend check,
    // which also performs the jump to `successor`.
    codegen_->MaybeIncrementHotness(/* is_frame_entry= */ false);
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  // The entry block's suspend check is emitted here, just before leaving it.
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ jmp(codegen_->GetLabelOf(successor));
  }
}
1731
void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
  // An unconditional jump has no operands and needs no LocationSummary.
  got->SetLocations(nullptr);
}
1735
void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
  // Shared with HTryBoundary; see HandleGoto.
  HandleGoto(got, got->GetSuccessor());
}
1739
void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
  // A try boundary has no operands and needs no LocationSummary.
  try_boundary->SetLocations(nullptr);
}
1743
1744void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1745 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1746 if (!successor->IsExitBlock()) {
1747 HandleGoto(try_boundary, successor);
1748 }
1749}
1750
void LocationsBuilderX86_64::VisitExit(HExit* exit) {
  // The exit instruction has no operands and needs no LocationSummary.
  exit->SetLocations(nullptr);
}
1754
void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
  // Intentionally empty: the exit block emits no code.
}
1757
// Emits the conditional jumps for a floating-point condition whose compare
// (ucomiss/ucomisd) has already set the flags. A NaN operand yields an
// "unordered" result, which is routed explicitly to whichever target the
// condition maps NaN to; otherwise it would be misclassified.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
                                                     LabelType* true_label,
                                                     LabelType* false_label) {
  if (cond->IsFPConditionTrueIfNaN()) {
    __ j(kUnordered, true_label);
  } else if (cond->IsFPConditionFalseIfNaN()) {
    __ j(kUnordered, false_label);
  }
  __ j(X86_64FPCondition(cond->GetCondition()), true_label);
}
1769
// Emits only the compare that sets the condition codes for `condition`;
// the consuming jump/setcc/cmov is emitted by the caller.
void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
  LocationSummary* locations = condition->GetLocations();

  Location left = locations->InAt(0);
  Location right = locations->InAt(1);
  DataType::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kReference: {
      // All 32-bit-or-narrower integral types and references use a 32-bit compare.
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case DataType::Type::kInt64: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case DataType::Type::kFloat32: {
      // The right operand may be a register, an in-memory literal, or a slot.
      if (right.IsFpuRegister()) {
        __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralFloatAddress(
                       right.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(right.IsStackSlot());
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    case DataType::Type::kFloat64: {
      if (right.IsFpuRegister()) {
        __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralDoubleAddress(
                       right.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(right.IsDoubleStackSlot());
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected condition type " << type;
  }
}
1823
// Emits a compare followed by the branch(es) for long/FP conditions folded
// into the HCondition. Used when the condition is not materialized.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
                                                                  LabelType* true_target_in,
                                                                  LabelType* false_target_in) {
  // Generated branching requires both targets to be explicit. If either of the
  // targets is nullptr (fallthrough) use and bind `fallthrough_target` instead.
  LabelType fallthrough_target;
  LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
  LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;

  // Generate the comparison to set the CC.
  GenerateCompareTest(condition);

  // Now generate the correct jump(s).
  DataType::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    case DataType::Type::kInt64: {
      __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
      break;
    }
    case DataType::Type::kFloat32: {
      // FP jumps need NaN handling; see GenerateFPJumps.
      GenerateFPJumps(condition, true_target, false_target);
      break;
    }
    case DataType::Type::kFloat64: {
      GenerateFPJumps(condition, true_target, false_target);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected condition type " << type;
  }

  if (false_target != &fallthrough_target) {
    __ jmp(false_target);
  }

  // Bind the fallthrough label only if some jump above actually targeted it.
  if (fallthrough_target.IsLinked()) {
    __ Bind(&fallthrough_target);
  }
}
1864
David Brazdil0debae72015-11-12 18:37:00 +00001865static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1866 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
1867 // are set only strictly before `branch`. We can't use the eflags on long
1868 // conditions if they are materialized due to the complex branching.
1869 return cond->IsCondition() &&
1870 cond->GetNext() == branch &&
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001871 !DataType::IsFloatingPointType(cond->InputAt(0)->GetType());
David Brazdil0debae72015-11-12 18:37:00 +00001872}
1873
// Emits the test and branch(es) for a condition consumed by `instruction`
// (HIf, HDeoptimize or HSelect). A nullptr target means "falls through".
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
                                                           size_t condition_input_index,
                                                           LabelType* true_target,
                                                           LabelType* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ jmp(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ jmp(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    if (AreEflagsSetFrom(cond, instruction)) {
      // Reuse the flags set when the condition was materialized.
      if (true_target == nullptr) {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
      } else {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
      }
    } else {
      // Materialized condition, compare against 0.
      Location lhs = instruction->GetLocations()->InAt(condition_input_index);
      if (lhs.IsRegister()) {
        __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
      } else {
        __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
      }
      if (true_target == nullptr) {
        __ j(kEqual, false_target);
      } else {
        __ j(kNotEqual, true_target);
      }
    }
  } else {
    // Condition has not been materialized, use its inputs as the
    // comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    // If this is a long or FP comparison that has been folded into
    // the HCondition, generate the comparison directly.
    DataType::Type type = condition->InputAt(0)->GetType();
    if (type == DataType::Type::kInt64 || DataType::IsFloatingPointType(type)) {
      GenerateCompareTestAndBranch(condition, true_target, false_target);
      return;
    }

    Location lhs = condition->GetLocations()->InAt(0);
    Location rhs = condition->GetLocations()->InAt(1);
    codegen_->GenerateIntCompare(lhs, rhs);
    if (true_target == nullptr) {
      __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
    } else {
      __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ jmp(false_target);
  }
}
1957
void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
  // Only a boolean value or a materialized condition is consumed as an input;
  // a condition emitted at its use site sets the flags directly.
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    locations->SetInAt(0, Location::Any());
  }
}
1964
1965void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001966 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1967 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1968 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1969 nullptr : codegen_->GetLabelOf(true_successor);
1970 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1971 nullptr : codegen_->GetLabelOf(false_successor);
Andreas Gampe3db70682018-12-26 15:12:03 -08001972 GenerateTestAndBranch(if_instr, /* condition_input_index= */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001973}
1974
void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetAllocator())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  // The slow path clobbers the first runtime-call argument register, so it
  // must be part of the custom caller-save set.
  InvokeRuntimeCallingConvention calling_convention;
  RegisterSet caller_saves = RegisterSet::Empty();
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetCustomSlowPathCallerSaves(caller_saves);
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::Any());
  }
}
1986
void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
  // Branch to the deoptimization slow path when the condition holds;
  // otherwise fall through (no false target).
  SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
  GenerateTestAndBranch<Label>(deoptimize,
                               /* condition_input_index= */ 0,
                               slow_path->GetEntryLabel(),
                               /* false_target= */ nullptr);
}
1994
void LocationsBuilderX86_64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  // The flag is read from the frame into a register; no inputs needed.
  LocationSummary* locations = new (GetGraph()->GetAllocator())
      LocationSummary(flag, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}
2000
void InstructionCodeGeneratorX86_64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  // Load the should_deoptimize flag from its frame slot (zero-initialized in
  // the frame entry).
  __ movl(flag->GetLocations()->Out().AsRegister<CpuRegister>(),
          Address(CpuRegister(RSP), codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
}
2005
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002006static bool SelectCanUseCMOV(HSelect* select) {
2007 // There are no conditional move instructions for XMMs.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002008 if (DataType::IsFloatingPointType(select->GetType())) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002009 return false;
2010 }
2011
2012 // A FP condition doesn't generate the single CC that we need.
2013 HInstruction* condition = select->GetCondition();
2014 if (condition->IsCondition() &&
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002015 DataType::IsFloatingPointType(condition->InputAt(0)->GetType())) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002016 return false;
2017 }
2018
2019 // We can generate a CMOV for this Select.
2020 return true;
2021}
2022
David Brazdil74eb1b22015-12-14 11:44:01 +00002023void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01002024 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002025 if (DataType::IsFloatingPointType(select->GetType())) {
David Brazdil74eb1b22015-12-14 11:44:01 +00002026 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002027 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00002028 } else {
2029 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002030 if (SelectCanUseCMOV(select)) {
Mark Mendelldee1b9a2016-02-12 14:36:51 -05002031 if (select->InputAt(1)->IsConstant()) {
2032 locations->SetInAt(1, Location::RequiresRegister());
2033 } else {
2034 locations->SetInAt(1, Location::Any());
2035 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002036 } else {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002037 locations->SetInAt(1, Location::Any());
2038 }
David Brazdil74eb1b22015-12-14 11:44:01 +00002039 }
2040 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
2041 locations->SetInAt(2, Location::RequiresRegister());
2042 }
2043 locations->SetOut(Location::SameAsFirstInput());
2044}
2045
// Lowers HSelect either to a CMOV (integer select) or to a test-and-branch
// around a move. The output register starts out holding the "false" value.
void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  if (SelectCanUseCMOV(select)) {
    // If both the condition and the source types are integer, we can generate
    // a CMOV to implement Select.
    CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
    Location value_true_loc = locations->InAt(1);
    DCHECK(locations->InAt(0).Equals(locations->Out()));

    HInstruction* select_condition = select->GetCondition();
    Condition cond = kNotEqual;

    // Figure out how to test the 'condition'.
    if (select_condition->IsCondition()) {
      HCondition* condition = select_condition->AsCondition();
      if (!condition->IsEmittedAtUseSite()) {
        // This was a previously materialized condition.
        // Can we use the existing condition code?
        if (AreEflagsSetFrom(condition, select)) {
          // Materialization was the previous instruction. Condition codes are right.
          cond = X86_64IntegerCondition(condition->GetCondition());
        } else {
          // No, we have to recreate the condition code.
          CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
          __ testl(cond_reg, cond_reg);
        }
      } else {
        // Condition deferred to its use site: emit the compare here.
        GenerateCompareTest(condition);
        cond = X86_64IntegerCondition(condition->GetCondition());
      }
    } else {
      // Must be a Boolean condition, which needs to be compared to 0.
      CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
      __ testl(cond_reg, cond_reg);
    }

    // If the condition is true, overwrite the output, which already contains false.
    // Generate the correct sized CMOV.
    bool is_64_bit = DataType::Is64BitType(select->GetType());
    if (value_true_loc.IsRegister()) {
      __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
    } else {
      __ cmov(cond,
              value_false,
              Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
    }
  } else {
    // Fallback: branch over the move of the "true" value when the condition
    // is false. The output already holds the "false" value (SameAsFirstInput).
    NearLabel false_target;
    GenerateTestAndBranch<NearLabel>(select,
                                     /* condition_input_index= */ 2,
                                     /* true_target= */ nullptr,
                                     &false_target);
    codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
    __ Bind(&false_target);
  }
}
2102
void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  // No operands; an empty LocationSummary is still allocated for the pass.
  new (GetGraph()->GetAllocator()) LocationSummary(info);
}
2106
void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}
2110
// Grows the current frame by `adjustment` bytes and records the matching
// CFA-offset change for unwinding.
// NOTE(review): `adjustment` (size_t) is passed to AdjustCFAOffset without an
// explicit cast — assumes it always fits the parameter type; confirm.
void CodeGeneratorX86_64::IncreaseFrame(size_t adjustment) {
  __ subq(CpuRegister(RSP), Immediate(adjustment));
  __ cfi().AdjustCFAOffset(adjustment);
}
2115
// Shrinks the current frame by `adjustment` bytes and records the matching
// CFA-offset change for unwinding.
// NOTE(review): `-adjustment` negates an unsigned size_t before the implicit
// conversion; relies on modular wrap-around yielding the intended negative
// delta (cf. the explicit cast used in GenerateFrameExit) — confirm.
void CodeGeneratorX86_64::DecreaseFrame(size_t adjustment) {
  __ addq(CpuRegister(RSP), Immediate(adjustment));
  __ cfi().AdjustCFAOffset(-adjustment);
}
2120
// Emits a single one-byte nop (used e.g. for native debug info padding).
void CodeGeneratorX86_64::GenerateNop() {
  __ nop();
}
2124
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002125void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002126 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002127 new (GetGraph()->GetAllocator()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04002128 // Handle the long/FP comparisons made in instruction simplification.
2129 switch (cond->InputAt(0)->GetType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002130 case DataType::Type::kInt64:
Mark Mendellc4701932015-04-10 13:18:51 -04002131 locations->SetInAt(0, Location::RequiresRegister());
2132 locations->SetInAt(1, Location::Any());
2133 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002134 case DataType::Type::kFloat32:
2135 case DataType::Type::kFloat64:
Mark Mendellc4701932015-04-10 13:18:51 -04002136 locations->SetInAt(0, Location::RequiresFpuRegister());
2137 locations->SetInAt(1, Location::Any());
2138 break;
2139 default:
2140 locations->SetInAt(0, Location::RequiresRegister());
2141 locations->SetInAt(1, Location::Any());
2142 break;
2143 }
David Brazdilb3e773e2016-01-26 11:28:37 +00002144 if (!cond->IsEmittedAtUseSite()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01002145 locations->SetOut(Location::RequiresRegister());
2146 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002147}
2148
// Materializes a condition as 0/1 in its output register. Integer conditions
// use setcc directly; FP conditions go through explicit jumps because of NaN
// handling (see GenerateFPJumps).
void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
  if (cond->IsEmittedAtUseSite()) {
    // The consumer emits the compare itself; nothing to materialize.
    return;
  }

  LocationSummary* locations = cond->GetLocations();
  Location lhs = locations->InAt(0);
  Location rhs = locations->InAt(1);
  CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
  NearLabel true_label, false_label;

  switch (cond->InputAt(0)->GetType()) {
    default:
      // Integer case.

      // Clear output register: setcc only sets the low byte.
      __ xorl(reg, reg);

      codegen_->GenerateIntCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case DataType::Type::kInt64:
      // Clear output register: setcc only sets the low byte.
      __ xorl(reg, reg);

      codegen_->GenerateLongCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case DataType::Type::kFloat32: {
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
      } else if (rhs.IsStackSlot()) {
        __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
    case DataType::Type::kFloat64: {
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
      } else if (rhs.IsDoubleStackSlot()) {
        __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
  }

  // Convert the jumps into the result.
  NearLabel done_label;

  // False case: result = 0.
  __ Bind(&false_label);
  __ xorl(reg, reg);
  __ jmp(&done_label);

  // True case: result = 1.
  __ Bind(&true_label);
  __ movl(reg, Immediate(1));
  __ Bind(&done_label);
}
2218
// Locations for all condition instructions are computed by the shared helper.
void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}
2222
// Code generation for all condition instructions is shared in HandleCondition().
void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}
2226
// Locations for all condition instructions are computed by the shared helper.
void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}
2230
// Code generation for all condition instructions is shared in HandleCondition().
void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}
2234
// Locations for all condition instructions are computed by the shared helper.
void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}
2238
// Code generation for all condition instructions is shared in HandleCondition().
void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}
2242
// Locations for all condition instructions are computed by the shared helper.
void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}
2246
// Code generation for all condition instructions is shared in HandleCondition().
void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}
2250
// Locations for all condition instructions are computed by the shared helper.
void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}
2254
// Code generation for all condition instructions is shared in HandleCondition().
void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}
2258
// Locations for all condition instructions are computed by the shared helper.
void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}
2262
// Code generation for all condition instructions is shared in HandleCondition().
void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}
2266
// Unsigned condition; locations computed by the shared helper.
void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}
2270
// Unsigned condition; code generation shared in HandleCondition().
void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}
2274
// Unsigned condition; locations computed by the shared helper.
void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}
2278
// Unsigned condition; code generation shared in HandleCondition().
void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}
2282
// Unsigned condition; locations computed by the shared helper.
void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}
2286
// Unsigned condition; code generation shared in HandleCondition().
void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}
2290
// Unsigned condition; locations computed by the shared helper.
void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
2294
// Unsigned condition; code generation shared in HandleCondition().
void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
2298
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01002299void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002300 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002301 new (GetGraph()->GetAllocator()) LocationSummary(compare, LocationSummary::kNoCall);
Calin Juravleddb7df22014-11-25 20:56:51 +00002302 switch (compare->InputAt(0)->GetType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002303 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002304 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002305 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002306 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002307 case DataType::Type::kInt16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002308 case DataType::Type::kInt32:
2309 case DataType::Type::kInt64: {
Calin Juravleddb7df22014-11-25 20:56:51 +00002310 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04002311 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00002312 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2313 break;
2314 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002315 case DataType::Type::kFloat32:
2316 case DataType::Type::kFloat64: {
Calin Juravleddb7df22014-11-25 20:56:51 +00002317 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04002318 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00002319 locations->SetOut(Location::RequiresRegister());
2320 break;
2321 }
2322 default:
2323 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
2324 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01002325}
2326
// Emits code for HCompare: out = -1, 0 or 1 depending on left vs right.
// Unordered FP comparisons resolve to the instruction's bias (gt/lt).
void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  NearLabel less, greater, done;
  DataType::Type type = compare->InputAt(0)->GetType();
  // Condition used to branch to the "less" label after the compare.
  Condition less_cond = kLess;

  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case DataType::Type::kInt64: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case DataType::Type::kFloat32: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        float value = right.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
      } else if (right.IsStackSlot()) {
        __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      // NaN operand: result is decided by the compare's bias.
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  //  ucomis{s,d} sets CF
      break;
    }
    case DataType::Type::kFloat64: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        double value = right.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
      } else if (right.IsDoubleStackSlot()) {
        __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      // NaN operand: result is decided by the compare's bias.
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  //  ucomis{s,d} sets CF
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }

  // Fan the flags out into -1/0/1.
  __ movl(out, Immediate(0));
  __ j(kEqual, &done);
  __ j(less_cond, &less);

  __ Bind(&greater);
  __ movl(out, Immediate(1));
  __ jmp(&done);

  __ Bind(&less);
  __ movl(out, Immediate(-1));

  __ Bind(&done);
}
2396
// Constants live in a ConstantLocation; no register is allocated.
void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2402
void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2406
// Constants live in a ConstantLocation; no register is allocated.
void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2412
void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2416
// Constants live in a ConstantLocation; no register is allocated.
void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2422
void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2426
// Constants live in a ConstantLocation; no register is allocated.
void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2432
void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2436
// Constants live in a ConstantLocation; no register is allocated.
void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2442
void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2447
// Constructor fences need no operand locations.
void LocationsBuilderX86_64::VisitConstructorFence(HConstructorFence* constructor_fence) {
  constructor_fence->SetLocations(nullptr);
}
2451
// A constructor fence is a store-store barrier that makes final-field
// writes visible before the reference is published.
void InstructionCodeGeneratorX86_64::VisitConstructorFence(
    HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
  codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}
2456
// Memory barriers need no operand locations.
void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}
2460
// Emits the barrier requested by the instruction's kind.
void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
2464
// A void return has no operands.
void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}
2468
// Void return: just tear down the frame and return.
void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}
2472
// The return value must already sit in the ABI return register:
// RAX for core types, XMM0 for floating point.
void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(ret, LocationSummary::kNoCall);
  switch (ret->InputAt(0)->GetType()) {
    case DataType::Type::kReference:
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
      break;

    default:
      LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
  }
}
2497
// Emits the method return. The register allocator already placed the value
// in RAX/XMM0 (checked below); FP returns are additionally mirrored into
// RAX for OSR-compiled methods.
void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
  switch (ret->InputAt(0)->GetType()) {
    case DataType::Type::kReference:
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
      break;

    case DataType::Type::kFloat32: {
      DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
                XMM0);
      // To simplify callers of an OSR method, we put the return value in both
      // floating point and core register.
      if (GetGraph()->IsCompilingOsr()) {
        __ movd(CpuRegister(RAX), XmmRegister(XMM0), /* is64bit= */ false);
      }
      break;
    }
    case DataType::Type::kFloat64: {
      DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
                XMM0);
      // To simplify callers of an OSR method, we put the return value in both
      // floating point and core register.
      if (GetGraph()->IsCompilingOsr()) {
        __ movd(CpuRegister(RAX), XmmRegister(XMM0), /* is64bit= */ true);
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
  }
  codegen_->GenerateFrameExit();
}
2537
// Managed (dex) ABI return location: RAX for core types, XMM0 for FP,
// no location for void. The switch is exhaustive over DataType::Type.
Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(DataType::Type type) const {
  switch (type) {
    case DataType::Type::kReference:
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kUint32:
    case DataType::Type::kInt32:
    case DataType::Type::kUint64:
    case DataType::Type::kInt64:
      return Location::RegisterLocation(RAX);

    case DataType::Type::kVoid:
      return Location::NoLocation();

    case DataType::Type::kFloat64:
    case DataType::Type::kFloat32:
      return Location::FpuRegisterLocation(XMM0);
  }

  UNREACHABLE();
}
2562
// The current ArtMethod* is passed in the dedicated method register (RDI).
Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}
2566
// Assigns the next argument location under the managed calling convention.
// Core and FP arguments consume independent register pools (gp_index_,
// float_index_); stack_index_ tracks the stack-slot position for arguments
// that spill (64-bit values take two slots).
Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(DataType::Type type) {
  switch (type) {
    case DataType::Type::kReference:
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      uint32_t index = gp_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case DataType::Type::kInt64: {
      uint32_t index = gp_index_;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfRegisters()) {
        gp_index_ += 1;
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        // Note: the gp register index is still advanced (by 2) even when
        // the argument goes on the stack.
        gp_index_ += 2;
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case DataType::Type::kFloat32: {
      uint32_t index = float_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case DataType::Type::kFloat64: {
      uint32_t index = float_index_++;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      UNREACHABLE();
  }
  return Location::NoLocation();
}
2625
// Assigns the next argument location for a @CriticalNative call, which
// follows the native (System V AMD64) convention: RDI is a normal argument
// rather than the method register.
Location CriticalNativeCallingConventionVisitorX86_64::GetNextLocation(DataType::Type type) {
  // @CriticalNative methods cannot take reference arguments.
  DCHECK_NE(type, DataType::Type::kReference);

  Location location = Location::NoLocation();
  if (DataType::IsFloatingPointType(type)) {
    if (fpr_index_ < kParameterFloatRegistersLength) {
      location = Location::FpuRegisterLocation(kParameterFloatRegisters[fpr_index_]);
      ++fpr_index_;
    }
  } else {
    // Native ABI uses the same registers as managed, except that the method register RDI
    // is a normal argument.
    if (gpr_index_ < 1u + kParameterCoreRegistersLength) {
      location = Location::RegisterLocation(
          gpr_index_ == 0u ? RDI : kParameterCoreRegisters[gpr_index_ - 1u]);
      ++gpr_index_;
    }
  }
  if (location.IsInvalid()) {
    // Out of registers: spill to the stack (one pointer-sized slot per arg).
    if (DataType::Is64BitType(type)) {
      location = Location::DoubleStackSlot(stack_offset_);
    } else {
      location = Location::StackSlot(stack_offset_);
    }
    stack_offset_ += kFramePointerSize;

    // During register allocation a stack argument may live anywhere;
    // the code generator will move it into place at the call.
    if (for_register_allocation_) {
      location = Location::Any();
    }
  }
  return location;
}
2658
2659Location CriticalNativeCallingConventionVisitorX86_64::GetReturnLocation(DataType::Type type)
2660 const {
2661 // We perform conversion to the managed ABI return register after the call if needed.
2662 InvokeDexCallingConventionVisitorX86_64 dex_calling_convention;
2663 return dex_calling_convention.GetReturnLocation(type);
2664}
2665
Location CriticalNativeCallingConventionVisitorX86_64::GetMethodLocation() const {
  // Pass the method in the hidden argument RAX.
  return Location::RegisterLocation(RAX);
}
2670
void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}
2677
// Unresolved invokes always go through the runtime trampoline.
void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
2681
// Sets up locations for static/direct invokes, preferring an intrinsic
// layout when available, and using the native (critical) convention for
// @CriticalNative calls.
void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  if (invoke->GetCodePtrLocation() == CodePtrLocation::kCallCriticalNative) {
    CriticalNativeCallingConventionVisitorX86_64 calling_convention_visitor(
        /*for_register_allocation=*/ true);
    CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
    CodeGeneratorX86_64::BlockNonVolatileXmmRegisters(invoke->GetLocations());
  } else {
    HandleInvoke(invoke);
  }
}
2701
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002702static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2703 if (invoke->GetLocations()->Intrinsified()) {
2704 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2705 intrinsic.Dispatch(invoke);
2706 return true;
2707 }
2708 return false;
2709}
2710
// Emits a static/direct call, preferring the intrinsic implementation
// when one was set up by the locations builder.
void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  LocationSummary* locations = invoke->GetLocations();
  codegen_->GenerateStaticOrDirectCall(
      invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
}
2724
// Shared location setup for all non-intrinsified invokes: arguments follow
// the managed dex calling convention.
void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
  InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}
2729
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002730void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Mark Mendellfb8d2792015-03-31 22:16:59 -04002731 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002732 if (intrinsic.TryDispatch(invoke)) {
2733 return;
2734 }
2735
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002736 HandleInvoke(invoke);
2737}
2738
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002739void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002740 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2741 return;
2742 }
2743
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002744 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002745 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002746}
2747
// Interface invokes additionally pass a hidden argument in RAX.
void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument.
  if (invoke->GetHiddenArgumentLoadKind() == MethodLoadKind::kRecursive) {
    invoke->GetLocations()->SetInAt(invoke->GetNumberOfArguments() - 1,
                                    Location::RegisterLocation(RAX));
  }
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
}
2757
// When JIT-compiling a baseline method (not intrinsified, not AOT), emits a
// check of the call site's InlineCache: if the receiver's class in `klass`
// already matches the first cache entry, fall through; otherwise call the
// kQuickUpdateInlineCache entrypoint so the runtime records the new class.
// No code is emitted outside baseline JIT compilation.
void CodeGeneratorX86_64::MaybeGenerateInlineCacheCheck(HInstruction* instruction,
                                                        CpuRegister klass) {
  // NOTE(review): callers must pass the class in RDI — presumably the
  // register the update entrypoint expects; confirm against the entrypoint.
  DCHECK_EQ(RDI, klass.AsRegister());
  // We know the destination of an intrinsic, so no need to record inline
  // caches.
  if (!instruction->GetLocations()->Intrinsified() &&
      GetGraph()->IsCompilingBaseline() &&
      !Runtime::Current()->IsAotCompiler()) {
    // Scoped access to the method's ProfilingInfo; may yield null, in which
    // case no cache check is emitted.
    ScopedProfilingInfoUse spiu(
        Runtime::Current()->GetJit(), GetGraph()->GetArtMethod(), Thread::Current());
    ProfilingInfo* info = spiu.GetProfilingInfo();
    if (info != nullptr) {
      InlineCache* cache = info->GetInlineCache(instruction->GetDexPc());
      // The cache's address is baked into the code as a 64-bit immediate,
      // so the InlineCache object must outlive the compiled code.
      uint64_t address = reinterpret_cast64<uint64_t>(cache);
      NearLabel done;
      __ movq(CpuRegister(TMP), Immediate(address));
      // Fast path for a monomorphic cache.
      __ cmpl(Address(CpuRegister(TMP), InlineCache::ClassesOffset().Int32Value()), klass);
      __ j(kEqual, &done);
      // Cache miss: let the runtime update the inline cache.
      GenerateInvokeRuntime(
          GetThreadOffset<kX86_64PointerSize>(kQuickUpdateInlineCache).Int32Value());
      __ Bind(&done);
    }
  }
}
2783
// Emits an interface invoke: load the receiver's class, optionally record an
// inline-cache check, then dispatch through the class's IMT (interface
// method table) slot. The hidden argument (the interface method, in RAX) is
// materialized either before the IMT load (compile-time load kinds) or from
// the IMT entry itself (kRuntimeCall, conflict resolution in the runtime).
void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
  Location receiver = locations->InAt(0);
  size_t class_offset = mirror::Object::ClassOffset().SizeValue();

  if (receiver.IsStackSlot()) {
    // Receiver spilled: reload it, then load its class.
    __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
    // /* HeapReference<Class> */ temp = temp->klass_
    __ movl(temp, Address(temp, class_offset));
  } else {
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
  }
  // The class load above is the implicit null check for the receiver; record
  // its pc so a fault there maps back to this invoke.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);

  // Baseline-JIT only; may clobber RAX (see comment below).
  codegen_->MaybeGenerateInlineCacheCheck(invoke, temp);

  if (invoke->GetHiddenArgumentLoadKind() != MethodLoadKind::kRecursive &&
      invoke->GetHiddenArgumentLoadKind() != MethodLoadKind::kRuntimeCall) {
    Location hidden_reg = locations->GetTemp(1);
    // Set the hidden argument. This is safe to do this here, as RAX
    // won't be modified thereafter, before the `call` instruction.
    // We also do it after MaybeGenerateInlineCache that may use RAX.
    DCHECK_EQ(RAX, hidden_reg.AsRegister<Register>());
    codegen_->LoadMethod(invoke->GetHiddenArgumentLoadKind(), hidden_reg, invoke);
  }

  // temp = temp->GetAddressOfIMT()
  __ movq(temp,
          Address(temp, mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
  // Byte offset of this interface method's slot within the IMT.
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kX86_64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  if (invoke->GetHiddenArgumentLoadKind() == MethodLoadKind::kRuntimeCall) {
    // We pass the method from the IMT in case of a conflict. This will ensure
    // we go into the runtime to resolve the actual method.
    Location hidden_reg = locations->GetTemp(1);
    __ movq(hidden_reg.AsRegister<CpuRegister>(), temp);
  }
  // call temp->GetEntryPoint();
  __ call(Address(
      temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize).SizeValue()));

  DCHECK(!codegen_->IsLeafMethod());
  // Record the call's pc for stack maps / deopt.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
2842
Orion Hodsonac141392017-01-13 11:53:47 +00002843void LocationsBuilderX86_64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
Andra Danciua0130e82020-07-23 12:34:56 +00002844 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
2845 if (intrinsic.TryDispatch(invoke)) {
2846 return;
2847 }
Orion Hodsonac141392017-01-13 11:53:47 +00002848 HandleInvoke(invoke);
2849}
2850
2851void InstructionCodeGeneratorX86_64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
Andra Danciua0130e82020-07-23 12:34:56 +00002852 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2853 return;
2854 }
Orion Hodsonac141392017-01-13 11:53:47 +00002855 codegen_->GenerateInvokePolymorphicCall(invoke);
2856}
2857
// Allocates locations for an invoke-custom (call-site bootstrap) using the
// common invoke path; there is no intrinsic dispatch for invoke-custom.
void LocationsBuilderX86_64::VisitInvokeCustom(HInvokeCustom* invoke) {
  HandleInvoke(invoke);
}
2861
// Emits an invoke-custom call; fully delegated to the shared code generator.
void InstructionCodeGeneratorX86_64::VisitInvokeCustom(HInvokeCustom* invoke) {
  codegen_->GenerateInvokeCustomCall(invoke);
}
2865
Roland Levillain88cb1752014-10-20 16:36:47 +01002866void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
2867 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002868 new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
Roland Levillain88cb1752014-10-20 16:36:47 +01002869 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002870 case DataType::Type::kInt32:
2871 case DataType::Type::kInt64:
Roland Levillain88cb1752014-10-20 16:36:47 +01002872 locations->SetInAt(0, Location::RequiresRegister());
2873 locations->SetOut(Location::SameAsFirstInput());
2874 break;
2875
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002876 case DataType::Type::kFloat32:
2877 case DataType::Type::kFloat64:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002878 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002879 locations->SetOut(Location::SameAsFirstInput());
Roland Levillain5368c212014-11-27 15:03:41 +00002880 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002881 break;
2882
2883 default:
2884 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2885 }
2886}
2887
// Emits code for HNeg. Integers use the neg instruction in place; floats and
// doubles are negated by XOR-ing the sign bit with a constant-pool mask
// (x86 has no FP negate instruction).
void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case DataType::Type::kInt32:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));  // Locations builder requested SameAsFirstInput.
      __ negl(out.AsRegister<CpuRegister>());
      break;

    case DataType::Type::kInt64:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));
      __ negq(out.AsRegister<CpuRegister>());
      break;

    case DataType::Type::kFloat32: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement float negation with an exclusive or with value
      // 0x80000000 (mask for bit 31, representing the sign of a
      // single-precision floating-point number).
      __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
      __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    case DataType::Type::kFloat64: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement double negation with an exclusive or with value
      // 0x8000000000000000 (mask for bit 63, representing the sign of
      // a double-precision floating-point number).
      __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
      __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
2931
// Builds locations for HTypeConversion, keyed on (result_type, input_type).
// Implicit (no-op) conversions must have been eliminated earlier, as checked
// by the DCHECK. Rough scheme: int<->int conversions accept flexible inputs
// (Any/register); FP->int needs the input in an XMM register; int->FP and
// FP<->FP accept Any and produce an XMM register.
void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(conversion, LocationSummary::kNoCall);
  DataType::Type result_type = conversion->GetResultType();
  DataType::Type input_type = conversion->GetInputType();
  DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
      << input_type << " -> " << result_type;

  switch (result_type) {
    // Narrowing to a sub-int type: any integral input, result in a register.
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
      DCHECK(DataType::IsIntegralType(input_type)) << input_type;
      locations->SetInAt(0, Location::Any());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case DataType::Type::kInt32:
      switch (input_type) {
        case DataType::Type::kInt64:
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        case DataType::Type::kFloat32:
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        case DataType::Type::kFloat64:
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case DataType::Type::kInt64:
      switch (input_type) {
        case DataType::Type::kBool:
        case DataType::Type::kUint8:
        case DataType::Type::kInt8:
        case DataType::Type::kUint16:
        case DataType::Type::kInt16:
        case DataType::Type::kInt32:
          // TODO: We would benefit from a (to-be-implemented)
          // Location::RegisterOrStackSlot requirement for this input.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        case DataType::Type::kFloat32:
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        case DataType::Type::kFloat64:
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case DataType::Type::kFloat32:
      switch (input_type) {
        case DataType::Type::kBool:
        case DataType::Type::kUint8:
        case DataType::Type::kInt8:
        case DataType::Type::kUint16:
        case DataType::Type::kInt16:
        case DataType::Type::kInt32:
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case DataType::Type::kInt64:
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case DataType::Type::kFloat64:
          // Output must not alias the input (kNoOutputOverlap).
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case DataType::Type::kFloat64:
      switch (input_type) {
        case DataType::Type::kBool:
        case DataType::Type::kUint8:
        case DataType::Type::kInt8:
        case DataType::Type::kUint16:
        case DataType::Type::kInt16:
        case DataType::Type::kInt32:
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case DataType::Type::kInt64:
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case DataType::Type::kFloat32:
          // Output must not alias the input (kNoOutputOverlap).
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
3064
3065void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
3066 LocationSummary* locations = conversion->GetLocations();
3067 Location out = locations->Out();
3068 Location in = locations->InAt(0);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003069 DataType::Type result_type = conversion->GetResultType();
3070 DataType::Type input_type = conversion->GetInputType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003071 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
3072 << input_type << " -> " << result_type;
Roland Levillaindff1f282014-11-05 14:15:05 +00003073 switch (result_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003074 case DataType::Type::kUint8:
Roland Levillain51d3fc42014-11-13 14:11:42 +00003075 switch (input_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003076 case DataType::Type::kInt8:
3077 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003078 case DataType::Type::kInt16:
3079 case DataType::Type::kInt32:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003080 case DataType::Type::kInt64:
3081 if (in.IsRegister()) {
3082 __ movzxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
3083 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
3084 __ movzxb(out.AsRegister<CpuRegister>(),
3085 Address(CpuRegister(RSP), in.GetStackIndex()));
3086 } else {
3087 __ movl(out.AsRegister<CpuRegister>(),
3088 Immediate(static_cast<uint8_t>(Int64FromConstant(in.GetConstant()))));
3089 }
3090 break;
3091
3092 default:
3093 LOG(FATAL) << "Unexpected type conversion from " << input_type
3094 << " to " << result_type;
3095 }
3096 break;
3097
3098 case DataType::Type::kInt8:
3099 switch (input_type) {
3100 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003101 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003102 case DataType::Type::kInt16:
3103 case DataType::Type::kInt32:
3104 case DataType::Type::kInt64:
Roland Levillain51d3fc42014-11-13 14:11:42 +00003105 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003106 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00003107 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003108 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00003109 Address(CpuRegister(RSP), in.GetStackIndex()));
3110 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003111 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00003112 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain51d3fc42014-11-13 14:11:42 +00003113 }
3114 break;
3115
3116 default:
3117 LOG(FATAL) << "Unexpected type conversion from " << input_type
3118 << " to " << result_type;
3119 }
3120 break;
3121
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003122 case DataType::Type::kUint16:
3123 switch (input_type) {
3124 case DataType::Type::kInt8:
3125 case DataType::Type::kInt16:
3126 case DataType::Type::kInt32:
3127 case DataType::Type::kInt64:
3128 if (in.IsRegister()) {
3129 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
3130 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
3131 __ movzxw(out.AsRegister<CpuRegister>(),
3132 Address(CpuRegister(RSP), in.GetStackIndex()));
3133 } else {
3134 __ movl(out.AsRegister<CpuRegister>(),
3135 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
3136 }
3137 break;
3138
3139 default:
3140 LOG(FATAL) << "Unexpected type conversion from " << input_type
3141 << " to " << result_type;
3142 }
3143 break;
3144
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003145 case DataType::Type::kInt16:
Roland Levillain01a8d712014-11-14 16:27:39 +00003146 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003147 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003148 case DataType::Type::kInt32:
3149 case DataType::Type::kInt64:
Roland Levillain01a8d712014-11-14 16:27:39 +00003150 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003151 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00003152 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003153 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00003154 Address(CpuRegister(RSP), in.GetStackIndex()));
3155 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003156 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00003157 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain01a8d712014-11-14 16:27:39 +00003158 }
3159 break;
3160
3161 default:
3162 LOG(FATAL) << "Unexpected type conversion from " << input_type
3163 << " to " << result_type;
3164 }
3165 break;
3166
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003167 case DataType::Type::kInt32:
Roland Levillain946e1432014-11-11 17:35:19 +00003168 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003169 case DataType::Type::kInt64:
Roland Levillain946e1432014-11-11 17:35:19 +00003170 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003171 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00003172 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003173 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00003174 Address(CpuRegister(RSP), in.GetStackIndex()));
3175 } else {
3176 DCHECK(in.IsConstant());
3177 DCHECK(in.GetConstant()->IsLongConstant());
3178 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003179 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00003180 }
3181 break;
3182
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003183 case DataType::Type::kFloat32: {
Roland Levillain3f8f9362014-12-02 17:45:01 +00003184 XmmRegister input = in.AsFpuRegister<XmmRegister>();
3185 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04003186 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00003187
3188 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04003189 // if input >= (float)INT_MAX goto done
Nick Desaulniers98e97c62019-10-18 14:25:19 -07003190 __ comiss(input, codegen_->LiteralFloatAddress(static_cast<float>(kPrimIntMax)));
Roland Levillain3f8f9362014-12-02 17:45:01 +00003191 __ j(kAboveEqual, &done);
3192 // if input == NaN goto nan
3193 __ j(kUnordered, &nan);
3194 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00003195 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00003196 __ jmp(&done);
3197 __ Bind(&nan);
3198 // output = 0
3199 __ xorl(output, output);
3200 __ Bind(&done);
3201 break;
3202 }
3203
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003204 case DataType::Type::kFloat64: {
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003205 XmmRegister input = in.AsFpuRegister<XmmRegister>();
3206 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04003207 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003208
3209 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04003210 // if input >= (double)INT_MAX goto done
3211 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003212 __ j(kAboveEqual, &done);
3213 // if input == NaN goto nan
3214 __ j(kUnordered, &nan);
3215 // output = double-to-int-truncate(input)
3216 __ cvttsd2si(output, input);
3217 __ jmp(&done);
3218 __ Bind(&nan);
3219 // output = 0
3220 __ xorl(output, output);
3221 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00003222 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003223 }
Roland Levillain946e1432014-11-11 17:35:19 +00003224
3225 default:
3226 LOG(FATAL) << "Unexpected type conversion from " << input_type
3227 << " to " << result_type;
3228 }
3229 break;
3230
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003231 case DataType::Type::kInt64:
Roland Levillaindff1f282014-11-05 14:15:05 +00003232 switch (input_type) {
3233 DCHECK(out.IsRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003234 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003235 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003236 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003237 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003238 case DataType::Type::kInt16:
3239 case DataType::Type::kInt32:
Roland Levillaindff1f282014-11-05 14:15:05 +00003240 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00003241 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00003242 break;
3243
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003244 case DataType::Type::kFloat32: {
Roland Levillain624279f2014-12-04 11:54:28 +00003245 XmmRegister input = in.AsFpuRegister<XmmRegister>();
3246 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04003247 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00003248
Mark Mendell92e83bf2015-05-07 11:25:03 -04003249 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04003250 // if input >= (float)LONG_MAX goto done
Nick Desaulniers98e97c62019-10-18 14:25:19 -07003251 __ comiss(input, codegen_->LiteralFloatAddress(static_cast<float>(kPrimLongMax)));
Roland Levillain624279f2014-12-04 11:54:28 +00003252 __ j(kAboveEqual, &done);
3253 // if input == NaN goto nan
3254 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003255 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00003256 __ cvttss2si(output, input, true);
3257 __ jmp(&done);
3258 __ Bind(&nan);
3259 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04003260 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00003261 __ Bind(&done);
3262 break;
3263 }
3264
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003265 case DataType::Type::kFloat64: {
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003266 XmmRegister input = in.AsFpuRegister<XmmRegister>();
3267 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04003268 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003269
Mark Mendell92e83bf2015-05-07 11:25:03 -04003270 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04003271 // if input >= (double)LONG_MAX goto done
Nick Desaulniers98e97c62019-10-18 14:25:19 -07003272 __ comisd(input, codegen_->LiteralDoubleAddress(
3273 static_cast<double>(kPrimLongMax)));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003274 __ j(kAboveEqual, &done);
3275 // if input == NaN goto nan
3276 __ j(kUnordered, &nan);
3277 // output = double-to-long-truncate(input)
3278 __ cvttsd2si(output, input, true);
3279 __ jmp(&done);
3280 __ Bind(&nan);
3281 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04003282 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003283 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00003284 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003285 }
Roland Levillaindff1f282014-11-05 14:15:05 +00003286
3287 default:
3288 LOG(FATAL) << "Unexpected type conversion from " << input_type
3289 << " to " << result_type;
3290 }
3291 break;
3292
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003293 case DataType::Type::kFloat32:
Roland Levillaincff13742014-11-17 14:32:17 +00003294 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003295 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003296 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003297 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003298 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003299 case DataType::Type::kInt16:
3300 case DataType::Type::kInt32:
Mark Mendell40741f32015-04-20 22:10:34 -04003301 if (in.IsRegister()) {
3302 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
3303 } else if (in.IsConstant()) {
3304 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
3305 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003306 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003307 } else {
3308 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
3309 Address(CpuRegister(RSP), in.GetStackIndex()), false);
3310 }
Roland Levillaincff13742014-11-17 14:32:17 +00003311 break;
3312
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003313 case DataType::Type::kInt64:
Mark Mendell40741f32015-04-20 22:10:34 -04003314 if (in.IsRegister()) {
3315 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
3316 } else if (in.IsConstant()) {
3317 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
3318 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Pavel Vyssotski4c858cd2016-03-16 13:59:53 +06003319 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003320 } else {
3321 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
3322 Address(CpuRegister(RSP), in.GetStackIndex()), true);
3323 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00003324 break;
3325
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003326 case DataType::Type::kFloat64:
Mark Mendell40741f32015-04-20 22:10:34 -04003327 if (in.IsFpuRegister()) {
3328 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
3329 } else if (in.IsConstant()) {
3330 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
3331 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003332 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003333 } else {
3334 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
3335 Address(CpuRegister(RSP), in.GetStackIndex()));
3336 }
Roland Levillaincff13742014-11-17 14:32:17 +00003337 break;
3338
3339 default:
3340 LOG(FATAL) << "Unexpected type conversion from " << input_type
3341 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08003342 }
Roland Levillaincff13742014-11-17 14:32:17 +00003343 break;
3344
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003345 case DataType::Type::kFloat64:
Roland Levillaincff13742014-11-17 14:32:17 +00003346 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003347 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003348 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003349 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003350 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003351 case DataType::Type::kInt16:
3352 case DataType::Type::kInt32:
Mark Mendell40741f32015-04-20 22:10:34 -04003353 if (in.IsRegister()) {
3354 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
3355 } else if (in.IsConstant()) {
3356 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
3357 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003358 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003359 } else {
3360 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
3361 Address(CpuRegister(RSP), in.GetStackIndex()), false);
3362 }
Roland Levillaincff13742014-11-17 14:32:17 +00003363 break;
3364
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003365 case DataType::Type::kInt64:
Mark Mendell40741f32015-04-20 22:10:34 -04003366 if (in.IsRegister()) {
3367 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
3368 } else if (in.IsConstant()) {
3369 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
3370 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003371 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003372 } else {
3373 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
3374 Address(CpuRegister(RSP), in.GetStackIndex()), true);
3375 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00003376 break;
3377
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003378 case DataType::Type::kFloat32:
Mark Mendell40741f32015-04-20 22:10:34 -04003379 if (in.IsFpuRegister()) {
3380 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
3381 } else if (in.IsConstant()) {
3382 float v = in.GetConstant()->AsFloatConstant()->GetValue();
3383 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003384 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003385 } else {
3386 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
3387 Address(CpuRegister(RSP), in.GetStackIndex()));
3388 }
Roland Levillaincff13742014-11-17 14:32:17 +00003389 break;
3390
3391 default:
3392 LOG(FATAL) << "Unexpected type conversion from " << input_type
3393 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08003394 }
Roland Levillaindff1f282014-11-05 14:15:05 +00003395 break;
3396
3397 default:
3398 LOG(FATAL) << "Unexpected type conversion from " << input_type
3399 << " to " << result_type;
3400 }
3401}
3402
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003403void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003404 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003405 new (GetGraph()->GetAllocator()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003406 switch (add->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003407 case DataType::Type::kInt32: {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003408 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003409 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
3410 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003411 break;
3412 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003413
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003414 case DataType::Type::kInt64: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003415 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05003416 // We can use a leaq or addq if the constant can fit in an immediate.
Mark Mendellea5af682015-10-22 17:35:49 -04003417 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05003418 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003419 break;
3420 }
3421
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003422 case DataType::Type::kFloat64:
3423 case DataType::Type::kFloat32: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003424 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003425 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003426 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003427 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003428 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003429
3430 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003431 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003432 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003433}
3434
void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
  // Code generation for HAdd. For the integer cases, when the output register
  // differs from both inputs we use lea as a non-destructive three-operand
  // add; the SSE cases are two-operand, so the output aliases the first input
  // (as requested by the locations builder).
  LocationSummary* locations = add->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  switch (add->GetResultType()) {
    case DataType::Type::kInt32: {
      if (second.IsRegister()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          // Addition is commutative: accumulate the first input instead.
          __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          // out != first and out != second: leal acts as a three-operand add.
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else if (second.IsConstant()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addl(out.AsRegister<CpuRegister>(),
                  Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          // Non-destructive register+immediate add via leal (base + disp).
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
        }
      } else {
        // Memory operand: two-operand addl is destructive, so the output must
        // be the first input.
        DCHECK(first.Equals(locations->Out()));
        __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kInt64: {
      if (second.IsRegister()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else {
        DCHECK(second.IsConstant());
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        int32_t int32_value = Low32Bits(value);
        // The locations builder only allows constants that fit in 32 bits
        // here (RegisterOrInt32Constant), so this truncation is lossless.
        DCHECK_EQ(int32_value, value);
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), int32_value));
        }
      }
      break;
    }

    case DataType::Type::kFloat32: {
      if (second.IsFpuRegister()) {
        __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand is materialized in the constant area and used as a
        // memory operand.
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat64: {
      if (second.IsFpuRegister()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
3526
3527void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003528 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003529 new (GetGraph()->GetAllocator()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003530 switch (sub->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003531 case DataType::Type::kInt32: {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003532 locations->SetInAt(0, Location::RequiresRegister());
3533 locations->SetInAt(1, Location::Any());
3534 locations->SetOut(Location::SameAsFirstInput());
3535 break;
3536 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003537 case DataType::Type::kInt64: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003538 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04003539 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003540 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003541 break;
3542 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003543 case DataType::Type::kFloat32:
3544 case DataType::Type::kFloat64: {
Calin Juravle11351682014-10-23 15:38:15 +01003545 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003546 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003547 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003548 break;
Calin Juravle11351682014-10-23 15:38:15 +01003549 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003550 default:
Calin Juravle11351682014-10-23 15:38:15 +01003551 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003552 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003553}
3554
void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
  // Code generation for HSub. Unlike VisitAdd there is no lea-based
  // three-operand form (subtraction is not commutative), so the output always
  // aliases the first input, enforced by the DCHECK below.
  LocationSummary* locations = sub->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));
  switch (sub->GetResultType()) {
    case DataType::Type::kInt32: {
      if (second.IsRegister()) {
        __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else if (second.IsConstant()) {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
        __ subl(first.AsRegister<CpuRegister>(), imm);
      } else {
        // Right-hand side lives on the stack; use a memory operand.
        __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }
    case DataType::Type::kInt64: {
      if (second.IsConstant()) {
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        // The locations builder (RegisterOrInt32Constant) guarantees the
        // constant fits in a 32-bit immediate.
        DCHECK(IsInt<32>(value));
        __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
      } else {
        __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      }
      break;
    }

    case DataType::Type::kFloat32: {
      if (second.IsFpuRegister()) {
        __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant is materialized in the constant area and used as a memory
        // operand.
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat64: {
      if (second.IsFpuRegister()) {
        __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}
3617
Calin Juravle34bacdf2014-10-07 20:23:36 +01003618void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3619 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003620 new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
Calin Juravle34bacdf2014-10-07 20:23:36 +01003621 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003622 case DataType::Type::kInt32: {
Calin Juravle34bacdf2014-10-07 20:23:36 +01003623 locations->SetInAt(0, Location::RequiresRegister());
3624 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003625 if (mul->InputAt(1)->IsIntConstant()) {
3626 // Can use 3 operand multiply.
3627 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3628 } else {
3629 locations->SetOut(Location::SameAsFirstInput());
3630 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003631 break;
3632 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003633 case DataType::Type::kInt64: {
Calin Juravle34bacdf2014-10-07 20:23:36 +01003634 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003635 locations->SetInAt(1, Location::Any());
3636 if (mul->InputAt(1)->IsLongConstant() &&
3637 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003638 // Can use 3 operand multiply.
3639 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3640 } else {
3641 locations->SetOut(Location::SameAsFirstInput());
3642 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003643 break;
3644 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003645 case DataType::Type::kFloat32:
3646 case DataType::Type::kFloat64: {
Calin Juravleb5bfa962014-10-21 18:02:24 +01003647 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003648 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003649 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003650 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003651 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003652
3653 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003654 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003655 }
3656}
3657
void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
  // Code generation for HMul. Constant right-hand sides use the
  // three-operand imul (register, register, immediate) when the immediate is
  // encodable; every other form is destructive, requiring out == first.
  LocationSummary* locations = mul->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();
  switch (mul->GetResultType()) {
    case DataType::Type::kInt32:
      // The constant may have ended up in a register, so test the HIR node
      // explicitly (not `second`) to avoid problems where the output may not
      // be the same as the first operand.
      if (mul->InputAt(1)->IsIntConstant()) {
        Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
        // Three-operand form: out = first * imm, first left untouched.
        __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(first.Equals(out));
        DCHECK(second.IsStackSlot());
        __ imull(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    case DataType::Type::kInt64: {
      // The constant may have ended up in a register, so test explicitly to
      // avoid problems where the output may not be the same as the first
      // operand.
      if (mul->InputAt(1)->IsLongConstant()) {
        int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
        if (IsInt<32>(value)) {
          // Three-operand form with a 32-bit immediate.
          __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
                   Immediate(static_cast<int32_t>(value)));
        } else {
          // Immediate does not fit in 32 bits: have to use the constant area.
          DCHECK(first.Equals(out));
          __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(second.IsDoubleStackSlot());
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat32: {
      // SSE multiply is two-operand and destructive.
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant is materialized in the constant area.
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat64: {
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
3741
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003742void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3743 uint32_t stack_adjustment, bool is_float) {
3744 if (source.IsStackSlot()) {
3745 DCHECK(is_float);
3746 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3747 } else if (source.IsDoubleStackSlot()) {
3748 DCHECK(!is_float);
3749 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3750 } else {
3751 // Write the value to the temporary location on the stack and load to FP stack.
3752 if (is_float) {
3753 Location stack_temp = Location::StackSlot(temp_offset);
3754 codegen_->Move(stack_temp, source);
3755 __ flds(Address(CpuRegister(RSP), temp_offset));
3756 } else {
3757 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3758 codegen_->Move(stack_temp, source);
3759 __ fldl(Address(CpuRegister(RSP), temp_offset));
3760 }
3761 }
3762}
3763
void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
  // Computes a floating-point remainder with the x87 fprem instruction.
  // fprem may perform only a partial argument reduction per iteration, so it
  // is retried until the FPU status word reports completion (C2 flag clear).
  DataType::Type type = rem->GetResultType();
  bool is_float = type == DataType::Type::kFloat32;
  size_t elem_size = DataType::Size(type);
  LocationSummary* locations = rem->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  // Create stack space for 2 elements.
  // TODO: enhance register allocator to ask for stack temporaries.
  __ subq(CpuRegister(RSP), Immediate(2 * elem_size));

  // Load the values to the FP stack in reverse order, using temporaries if needed.
  // fprem expects the divisor in ST(1) and the dividend in ST(0).
  PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
  PushOntoFPStack(first, 0, 2 * elem_size, is_float);

  // Loop doing FPREM until we stabilize.
  NearLabel retry;
  __ Bind(&retry);
  __ fprem();

  // Move FP status to AX.
  __ fstsw();

  // And see if the argument reduction is complete. This is signaled by the
  // C2 FPU flag bit set to 0.
  __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
  __ j(kNotEqual, &retry);

  // We have settled on the final value. Retrieve it into an XMM register.
  // Store FP top of stack to real stack.
  if (is_float) {
    __ fsts(Address(CpuRegister(RSP), 0));
  } else {
    __ fstl(Address(CpuRegister(RSP), 0));
  }

  // Pop the 2 items from the FP stack.
  __ fucompp();

  // Load the value from the stack into an XMM register.
  DCHECK(out.IsFpuRegister()) << out;
  if (is_float) {
    __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  } else {
    __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  }

  // And remove the temporary stack space we allocated.
  __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
}
3816
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003817void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3818 DCHECK(instruction->IsDiv() || instruction->IsRem());
3819
3820 LocationSummary* locations = instruction->GetLocations();
3821 Location second = locations->InAt(1);
3822 DCHECK(second.IsConstant());
3823
3824 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3825 CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003826 int64_t imm = Int64FromConstant(second.GetConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003827
3828 DCHECK(imm == 1 || imm == -1);
3829
3830 switch (instruction->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003831 case DataType::Type::kInt32: {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003832 if (instruction->IsRem()) {
3833 __ xorl(output_register, output_register);
3834 } else {
3835 __ movl(output_register, input_register);
3836 if (imm == -1) {
3837 __ negl(output_register);
3838 }
3839 }
3840 break;
3841 }
3842
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003843 case DataType::Type::kInt64: {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003844 if (instruction->IsRem()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003845 __ xorl(output_register, output_register);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003846 } else {
3847 __ movq(output_register, input_register);
3848 if (imm == -1) {
3849 __ negq(output_register);
3850 }
3851 }
3852 break;
3853 }
3854
3855 default:
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003856 LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003857 }
3858}
void InstructionCodeGeneratorX86_64::RemByPowerOfTwo(HRem* instruction) {
  // Fast path for x % (+/-2^k). The magnitude of the result is
  // x & (2^k - 1); when x is negative and that mask is non-zero, the result
  // must be adjusted to be negative (remainder takes the sign of the
  // dividend), i.e. (x & (2^k - 1)) - 2^k.
  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
  uint64_t abs_imm = AbsOrMin(imm);
  CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();
  if (instruction->GetResultType() == DataType::Type::kInt32) {
    NearLabel done;
    // out = x & (2^k - 1); if zero, the sign adjustment is unnecessary.
    __ movl(out, numerator);
    __ andl(out, Immediate(abs_imm-1));
    __ j(Condition::kZero, &done);
    // tmp = out + ~(2^k - 1) == out - 2^k (the negative-dividend result).
    __ leal(tmp, Address(out, static_cast<int32_t>(~(abs_imm-1))));
    // Select tmp only when the dividend is negative.
    __ testl(numerator, numerator);
    __ cmov(Condition::kLess, out, tmp, false);
    __ Bind(&done);

  } else {
    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
    // The 64-bit mask may not fit in an immediate, so materialize it.
    codegen_->Load64BitValue(tmp, abs_imm - 1);
    NearLabel done;

    __ movq(out, numerator);
    __ andq(out, tmp);
    __ j(Condition::kZero, &done);
    // Build -(2^k) as (sign(x) << k): all-ones above bit k when x < 0.
    __ movq(tmp, numerator);
    __ sarq(tmp, Immediate(63));
    __ shlq(tmp, Immediate(WhichPowerOf2(abs_imm)));
    // For a negative dividend this yields out | -(2^k) == out - 2^k;
    // for a non-negative dividend tmp is zero and out is unchanged.
    __ orq(out, tmp);
    __ Bind(&done);
  }
}
void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
  // Fast path for x / (+/-2^k). An arithmetic shift alone rounds toward
  // negative infinity, so negative dividends are first biased by (2^k - 1)
  // to get round-toward-zero semantics; a negative divisor negates the
  // result at the end.
  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
  CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();

  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
  uint64_t abs_imm = AbsOrMin(imm);

  CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();

  if (instruction->GetResultType() == DataType::Type::kInt32) {
    // When denominator is equal to 2, we can add signed bit and numerator to tmp.
    // Below we are using addl instruction instead of cmov which give us 1 cycle benefit.
    if (abs_imm == 2) {
      __ leal(tmp, Address(numerator, 0));
      __ shrl(tmp, Immediate(31));
      __ addl(tmp, numerator);
    } else {
      // tmp = numerator + (2^k - 1), selected only for negative numerators.
      __ leal(tmp, Address(numerator, abs_imm - 1));
      __ testl(numerator, numerator);
      __ cmov(kGreaterEqual, tmp, numerator);
    }
    // CTZ of +/-2^k is k in either case.
    int shift = CTZ(imm);
    __ sarl(tmp, Immediate(shift));

    if (imm < 0) {
      __ negl(tmp);
    }

    __ movl(output_register, tmp);
  } else {
    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
    CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();
    if (abs_imm == 2) {
      // Bias by the sign bit (0 or 1), cheaper than the cmov sequence below.
      __ movq(rdx, numerator);
      __ shrq(rdx, Immediate(63));
      __ addq(rdx, numerator);
    } else {
      // The 64-bit bias may not fit in an immediate, so materialize it.
      codegen_->Load64BitValue(rdx, abs_imm - 1);
      __ addq(rdx, numerator);
      __ testq(numerator, numerator);
      __ cmov(kGreaterEqual, rdx, numerator);
    }
    int shift = CTZ(imm);
    __ sarq(rdx, Immediate(shift));

    if (imm < 0) {
      __ negq(rdx);
    }

    __ movq(output_register, rdx);
  }
}
3949
// Generates div/rem by an arbitrary constant (|imm| >= 2, not a power of two)
// without idiv, using magic-number multiplication: the numerator is multiplied
// by a precomputed "magic" constant, the high half is corrected/shifted to
// yield the quotient; for rem, the quotient is multiplied back and subtracted.
void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  // One-operand imul clobbers RDX:RAX, so the numerator is preserved in a temp.
  CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
                                               : locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
                                         : locations->Out().AsRegister<CpuRegister>();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();

  // The location builder pinned the input to RAX and the output to RAX (div)
  // or RDX (rem); verify those invariants here.
  DCHECK_EQ(RAX, eax.AsRegister());
  DCHECK_EQ(RDX, edx.AsRegister());
  if (instruction->IsDiv()) {
    DCHECK_EQ(RAX, out.AsRegister());
  } else {
    DCHECK_EQ(RDX, out.AsRegister());
  }

  int64_t magic;
  int shift;

  // TODO: can these branches be written as one?
  if (instruction->GetResultType() == DataType::Type::kInt32) {
    int imm = second.GetConstant()->AsIntConstant()->GetValue();

    CalculateMagicAndShiftForDivRem(imm, false /* is_long= */, &magic, &shift);

    // Save the numerator.
    __ movl(numerator, eax);

    // EDX:EAX = magic * numerator.
    __ movl(eax, Immediate(magic));
    __ imull(numerator);

    // Correct the high half when imm and magic have opposite signs.
    if (imm > 0 && magic < 0) {
      // EDX += numerator.
      __ addl(edx, numerator);
    } else if (imm < 0 && magic > 0) {
      // EDX -= numerator.
      __ subl(edx, numerator);
    }

    // Shift if needed.
    if (shift != 0) {
      __ sarl(edx, Immediate(shift));
    }

    // EDX += 1 if EDX < 0 (rounds the quotient toward zero).
    __ movl(eax, edx);
    __ shrl(edx, Immediate(31));
    __ addl(edx, eax);

    if (instruction->IsRem()) {
      // rem = numerator - quotient * imm, delivered in EDX.
      __ movl(eax, numerator);
      __ imull(edx, Immediate(imm));
      __ subl(eax, edx);
      __ movl(edx, eax);
    } else {
      __ movl(eax, edx);
    }
  } else {
    int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();

    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);

    CpuRegister rax = eax;
    CpuRegister rdx = edx;

    CalculateMagicAndShiftForDivRem(imm, true /* is_long= */, &magic, &shift);

    // Save the numerator.
    __ movq(numerator, rax);

    // RAX = magic
    codegen_->Load64BitValue(rax, magic);

    // RDX:RAX = magic * numerator
    __ imulq(numerator);

    if (imm > 0 && magic < 0) {
      // RDX += numerator
      __ addq(rdx, numerator);
    } else if (imm < 0 && magic > 0) {
      // RDX -= numerator
      __ subq(rdx, numerator);
    }

    // Shift if needed.
    if (shift != 0) {
      __ sarq(rdx, Immediate(shift));
    }

    // RDX += 1 if RDX < 0
    __ movq(rax, rdx);
    __ shrq(rdx, Immediate(63));
    __ addq(rdx, rax);

    if (instruction->IsRem()) {
      __ movq(rax, numerator);

      // imulq's immediate form only takes 32-bit values; use a RIP-relative
      // literal for wider constants.
      if (IsInt<32>(imm)) {
        __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
      } else {
        __ imulq(rdx, codegen_->LiteralInt64Address(imm));
      }

      __ subq(rax, rdx);
      __ movq(rdx, rax);
    } else {
      __ movq(rax, rdx);
    }
  }
}
4060
// Generates code for an integral HDiv or HRem. Constant divisors are
// strength-reduced (+/-1, powers of two, magic-number multiplication); a
// register divisor uses idiv with a slow path guarding the min_value / -1
// overflow case.
void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DataType::Type type = instruction->GetResultType();
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);

  bool is_div = instruction->IsDiv();
  LocationSummary* locations = instruction->GetLocations();

  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  // idiv takes its dividend in RDX:RAX and leaves the quotient in RAX and the
  // remainder in RDX; the location builder pinned input/output accordingly.
  DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
  DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());

  if (second.IsConstant()) {
    int64_t imm = Int64FromConstant(second.GetConstant());

    if (imm == 0) {
      // Do not generate anything. DivZeroCheck would prevent any code to be executed.
    } else if (imm == 1 || imm == -1) {
      DivRemOneOrMinusOne(instruction);
    } else if (IsPowerOfTwo(AbsOrMin(imm))) {
      if (is_div) {
        DivByPowerOfTwo(instruction->AsDiv());
      } else {
        RemByPowerOfTwo(instruction->AsRem());
      }
    } else {
      DCHECK(imm <= -2 || imm >= 2);
      GenerateDivRemWithAnyConstant(instruction);
    }
  } else {
    SlowPathCode* slow_path =
        new (codegen_->GetScopedAllocator()) DivRemMinusOneSlowPathX86_64(
            instruction, out.AsRegister(), type, is_div);
    codegen_->AddSlowPath(slow_path);

    CpuRegister second_reg = second.AsRegister<CpuRegister>();
    // 0x80000000(00000000)/-1 triggers an arithmetic exception!
    // Dividing by -1 is actually negation and -0x800000000(00000000) = 0x80000000(00000000)
    // so it's safe to just use negl instead of more complex comparisons.
    if (type == DataType::Type::kInt32) {
      __ cmpl(second_reg, Immediate(-1));
      __ j(kEqual, slow_path->GetEntryLabel());
      // edx:eax <- sign-extended of eax
      __ cdq();
      // eax = quotient, edx = remainder
      __ idivl(second_reg);
    } else {
      __ cmpq(second_reg, Immediate(-1));
      __ j(kEqual, slow_path->GetEntryLabel());
      // rdx:rax <- sign-extended of rax
      __ cqo();
      // rax = quotient, rdx = remainder
      __ idivq(second_reg);
    }
    __ Bind(slow_path->GetExitLabel());
  }
}
4120
// Sets up register constraints for HDiv. Integral division pins the dividend
// and result to RAX (as required by idiv); FP division accepts the divisor
// anywhere (register, constant pool, or stack slot).
void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(div, LocationSummary::kNoCall);
  switch (div->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
      locations->SetOut(Location::SameAsFirstInput());
      // Intel uses edx:eax as the dividend.
      locations->AddTemp(Location::RegisterLocation(RDX));
      // We need to save the numerator while we tweak rax and rdx. As we are using imul in a way
      // which enforces results to be in RAX and RDX, things are simpler if we use RDX also as
      // output and request another temp.
      if (div->InputAt(1)->IsConstant()) {
        locations->AddTemp(Location::RequiresRegister());
      }
      break;
    }

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      // divss/divsd can take their second operand from a register, a
      // RIP-relative literal, or a stack slot, hence Location::Any().
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
4153
// Generates code for HDiv: integral types go through the shared div/rem
// helper; float/double emit divss/divsd with the divisor taken from wherever
// the register allocator placed it.
void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
  LocationSummary* locations = div->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  // The output was constrained to alias the first input.
  DCHECK(first.Equals(locations->Out()));

  DataType::Type type = div->GetResultType();
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GenerateDivRemIntegral(div);
      break;
    }

    case DataType::Type::kFloat32: {
      if (second.IsFpuRegister()) {
        __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant divisor: divide by a RIP-relative literal.
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat64: {
      if (second.IsFpuRegister()) {
        __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
4202
// Sets up register constraints for HRem. Integral remainder pins the dividend
// to RAX and the result to RDX (where idiv leaves the remainder); FP remainder
// is handled out of line by GenerateRemFP and reserves RAX as a temp.
void LocationsBuilderX86_64::VisitRem(HRem* rem) {
  DataType::Type type = rem->GetResultType();
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(rem, LocationSummary::kNoCall);

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
      // Intel uses rdx:rax as the dividend and puts the remainder in rdx
      locations->SetOut(Location::RegisterLocation(RDX));
      // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
      // which enforces results to be in RAX and RDX, things are simpler if we use EAX also as
      // output and request another temp.
      if (rem->InputAt(1)->IsConstant()) {
        locations->AddTemp(Location::RequiresRegister());
      }
      break;
    }

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      locations->SetInAt(0, Location::Any());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::RequiresFpuRegister());
      locations->AddTemp(Location::RegisterLocation(RAX));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
4237
4238void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004239 DataType::Type type = rem->GetResultType();
Calin Juravlebacfec32014-11-14 15:54:36 +00004240 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004241 case DataType::Type::kInt32:
4242 case DataType::Type::kInt64: {
Calin Juravlebacfec32014-11-14 15:54:36 +00004243 GenerateDivRemIntegral(rem);
4244 break;
4245 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004246 case DataType::Type::kFloat32:
4247 case DataType::Type::kFloat64: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05004248 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00004249 break;
4250 }
Calin Juravlebacfec32014-11-14 15:54:36 +00004251 default:
4252 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
4253 }
4254}
4255
Aart Bik1f8d51b2018-02-15 10:42:37 -08004256static void CreateMinMaxLocations(ArenaAllocator* allocator, HBinaryOperation* minmax) {
4257 LocationSummary* locations = new (allocator) LocationSummary(minmax);
4258 switch (minmax->GetResultType()) {
4259 case DataType::Type::kInt32:
4260 case DataType::Type::kInt64:
4261 locations->SetInAt(0, Location::RequiresRegister());
4262 locations->SetInAt(1, Location::RequiresRegister());
4263 locations->SetOut(Location::SameAsFirstInput());
4264 break;
4265 case DataType::Type::kFloat32:
4266 case DataType::Type::kFloat64:
4267 locations->SetInAt(0, Location::RequiresFpuRegister());
4268 locations->SetInAt(1, Location::RequiresFpuRegister());
4269 // The following is sub-optimal, but all we can do for now. It would be fine to also accept
4270 // the second input to be the output (we can simply swap inputs).
4271 locations->SetOut(Location::SameAsFirstInput());
4272 break;
4273 default:
4274 LOG(FATAL) << "Unexpected type for HMinMax " << minmax->GetResultType();
4275 }
4276}
4277
// Generates integer min/max with cmp + cmov. The output aliases the first
// input, so only the "keep op2" case needs a conditional move.
void InstructionCodeGeneratorX86_64::GenerateMinMaxInt(LocationSummary* locations,
                                                       bool is_min,
                                                       DataType::Type type) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);

  // Shortcut for same input locations.
  if (op1_loc.Equals(op2_loc)) {
    // Can return immediately, as op1_loc == out_loc.
    // Note: if we ever support separate registers, e.g., output into memory, we need to check for
    // a copy here.
    DCHECK(locations->Out().Equals(op1_loc));
    return;
  }

  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  CpuRegister op2 = op2_loc.AsRegister<CpuRegister>();

  // (out := op1)
  // out <=? op2
  // if out is min jmp done
  // out := op2
  // done:

  if (type == DataType::Type::kInt64) {
    __ cmpq(out, op2);
    // For min, replace out with op2 when out > op2; for max, when out < op2.
    __ cmov(is_min ? Condition::kGreater : Condition::kLess, out, op2, /*is64bit*/ true);
  } else {
    DCHECK_EQ(type, DataType::Type::kInt32);
    __ cmpl(out, op2);
    __ cmov(is_min ? Condition::kGreater : Condition::kLess, out, op2, /*is64bit*/ false);
  }
}
4311
// Generates float/double min/max, handling the NaN and -0.0/+0.0 special
// cases: any NaN operand yields a canonical quiet NaN, and equal-magnitude
// zeros are combined bitwise (or for min => -0.0 wins, and for max => +0.0
// wins).
void InstructionCodeGeneratorX86_64::GenerateMinMaxFP(LocationSummary* locations,
                                                      bool is_min,
                                                      DataType::Type type) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);
  Location out_loc = locations->Out();
  XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();

  // Shortcut for same input locations.
  if (op1_loc.Equals(op2_loc)) {
    DCHECK(out_loc.Equals(op1_loc));
    return;
  }

  // (out := op1)
  // out <=? op2
  // if Nan jmp Nan_label
  // if out is min jmp done
  // if op2 is min jmp op2_label
  // handle -0/+0
  // jmp done
  // Nan_label:
  // out := NaN
  // op2_label:
  // out := op2
  // done:
  //
  // This removes one jmp, but needs to copy one input (op1) to out.
  //
  // TODO: This is straight from Quick. Make NaN an out-of-line slowpath?

  XmmRegister op2 = op2_loc.AsFpuRegister<XmmRegister>();

  NearLabel nan, done, op2_label;
  if (type == DataType::Type::kFloat64) {
    __ ucomisd(out, op2);
  } else {
    DCHECK_EQ(type, DataType::Type::kFloat32);
    __ ucomiss(out, op2);
  }

  // ucomiss/ucomisd set the parity flag on an unordered (NaN) comparison.
  __ j(Condition::kParityEven, &nan);

  __ j(is_min ? Condition::kAbove : Condition::kBelow, &op2_label);
  __ j(is_min ? Condition::kBelow : Condition::kAbove, &done);

  // Handle 0.0/-0.0.
  if (is_min) {
    if (type == DataType::Type::kFloat64) {
      __ orpd(out, op2);
    } else {
      __ orps(out, op2);
    }
  } else {
    if (type == DataType::Type::kFloat64) {
      __ andpd(out, op2);
    } else {
      __ andps(out, op2);
    }
  }
  __ jmp(&done);

  // NaN handling: load the canonical quiet-NaN bit pattern.
  __ Bind(&nan);
  if (type == DataType::Type::kFloat64) {
    __ movsd(out, codegen_->LiteralInt64Address(INT64_C(0x7FF8000000000000)));
  } else {
    __ movss(out, codegen_->LiteralInt32Address(INT32_C(0x7FC00000)));
  }
  __ jmp(&done);

  // out := op2;
  __ Bind(&op2_label);
  if (type == DataType::Type::kFloat64) {
    __ movsd(out, op2);
  } else {
    __ movss(out, op2);
  }

  // Done.
  __ Bind(&done);
}
4394
Aart Bik351df3e2018-03-07 11:54:57 -08004395void InstructionCodeGeneratorX86_64::GenerateMinMax(HBinaryOperation* minmax, bool is_min) {
4396 DataType::Type type = minmax->GetResultType();
4397 switch (type) {
4398 case DataType::Type::kInt32:
4399 case DataType::Type::kInt64:
4400 GenerateMinMaxInt(minmax->GetLocations(), is_min, type);
4401 break;
4402 case DataType::Type::kFloat32:
4403 case DataType::Type::kFloat64:
4404 GenerateMinMaxFP(minmax->GetLocations(), is_min, type);
4405 break;
4406 default:
4407 LOG(FATAL) << "Unexpected type for HMinMax " << type;
4408 }
4409}
4410
// HMin/HMax share locations (CreateMinMaxLocations) and code generation
// (GenerateMinMax); only the is_min flag differs.
void LocationsBuilderX86_64::VisitMin(HMin* min) {
  CreateMinMaxLocations(GetGraph()->GetAllocator(), min);
}

void InstructionCodeGeneratorX86_64::VisitMin(HMin* min) {
  GenerateMinMax(min, /*is_min*/ true);
}

void LocationsBuilderX86_64::VisitMax(HMax* max) {
  CreateMinMaxLocations(GetGraph()->GetAllocator(), max);
}

void InstructionCodeGeneratorX86_64::VisitMax(HMax* max) {
  GenerateMinMax(max, /*is_min*/ false);
}
4426
// Sets up locations for HAbs: the result aliases the input, and a temp of the
// matching register class holds the sign/abs mask.
void LocationsBuilderX86_64::VisitAbs(HAbs* abs) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(abs);
  switch (abs->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::SameAsFirstInput());
      locations->AddTemp(Location::RequiresRegister());
      break;
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::SameAsFirstInput());
      locations->AddTemp(Location::RequiresFpuRegister());
      break;
    default:
      LOG(FATAL) << "Unexpected type for HAbs " << abs->GetResultType();
  }
}
4446
// Generates code for HAbs. Integers use the branch-free identity
// |x| == (x + (x >> 31)) ^ (x >> 31) (the arithmetic shift yields an all-ones
// mask for negatives, zero otherwise); floats/doubles simply clear the sign
// bit with an and-mask.
void InstructionCodeGeneratorX86_64::VisitAbs(HAbs* abs) {
  LocationSummary* locations = abs->GetLocations();
  switch (abs->GetResultType()) {
    case DataType::Type::kInt32: {
      CpuRegister out = locations->Out().AsRegister<CpuRegister>();
      CpuRegister mask = locations->GetTemp(0).AsRegister<CpuRegister>();
      // Create mask.
      __ movl(mask, out);
      __ sarl(mask, Immediate(31));
      // Add mask.
      __ addl(out, mask);
      __ xorl(out, mask);
      break;
    }
    case DataType::Type::kInt64: {
      CpuRegister out = locations->Out().AsRegister<CpuRegister>();
      CpuRegister mask = locations->GetTemp(0).AsRegister<CpuRegister>();
      // Create mask.
      __ movq(mask, out);
      __ sarq(mask, Immediate(63));
      // Add mask.
      __ addq(out, mask);
      __ xorq(out, mask);
      break;
    }
    case DataType::Type::kFloat32: {
      XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Clear the sign bit (bit 31).
      __ movss(mask, codegen_->LiteralInt32Address(INT32_C(0x7FFFFFFF)));
      __ andps(out, mask);
      break;
    }
    case DataType::Type::kFloat64: {
      XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Clear the sign bit (bit 63).
      __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x7FFFFFFFFFFFFFFF)));
      __ andpd(out, mask);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HAbs " << abs->GetResultType();
  }
}
4490
Calin Juravled0d48522014-11-04 16:40:20 +00004491void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004492 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Calin Juravled0d48522014-11-04 16:40:20 +00004493 locations->SetInAt(0, Location::Any());
Calin Juravled0d48522014-11-04 16:40:20 +00004494}
4495
// Generates the divide-by-zero check: jumps to the throwing slow path when the
// divisor is zero. Constant divisors are resolved at compile time (zero jumps
// unconditionally, non-zero emits nothing).
void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCode* slow_path =
      new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathX86_64(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location value = locations->InAt(0);

  switch (instruction->GetType()) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      if (value.IsRegister()) {
        __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsStackSlot()) {
        __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
          // Constant zero divisor: always throw.
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    case DataType::Type::kInt64: {
      if (value.IsRegister()) {
        __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsDoubleStackSlot()) {
        __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
          // Constant zero divisor: always throw.
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
  }
}
4544
// Shared location setup for HShl/HShr/HUShr: the value shifted in place
// (output aliases first input) and the shift count in CL (x86 requirement for
// variable-count shifts) unless it is a constant.
void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(op, LocationSummary::kNoCall);

  switch (op->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RequiresRegister());
      // The shift count needs to be in CL.
      locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
  }
}
4564
// Shared code generation for HShl (shl), HShr (sar) and HUShr (shr). Constant
// shift counts are masked to the type's maximum distance (31 or 63), matching
// the hardware's implicit masking for register counts.
void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  switch (op->GetResultType()) {
    case DataType::Type::kInt32: {
      if (second.IsRegister()) {
        // Variable count: the location builder pinned it to CL.
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        if (op->IsShl()) {
          __ shll(first_reg, second_reg);
        } else if (op->IsShr()) {
          __ sarl(first_reg, second_reg);
        } else {
          __ shrl(first_reg, second_reg);
        }
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
        if (op->IsShl()) {
          __ shll(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarl(first_reg, imm);
        } else {
          __ shrl(first_reg, imm);
        }
      }
      break;
    }
    case DataType::Type::kInt64: {
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        if (op->IsShl()) {
          __ shlq(first_reg, second_reg);
        } else if (op->IsShr()) {
          __ sarq(first_reg, second_reg);
        } else {
          __ shrq(first_reg, second_reg);
        }
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
        if (op->IsShl()) {
          __ shlq(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarq(first_reg, imm);
        } else {
          __ shrq(first_reg, imm);
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
      UNREACHABLE();
  }
}
4622
// Sets up locations for HRor (rotate right): rotated in place, with the
// rotation count in CL (x86 requirement for variable counts) unless constant.
void LocationsBuilderX86_64::VisitRor(HRor* ror) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(ror, LocationSummary::kNoCall);

  switch (ror->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RequiresRegister());
      // The shift count needs to be in CL (unless it is a constant).
      locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
      UNREACHABLE();
  }
}
4641
// Generates HRor via the ror instruction. Constant counts are masked to the
// type's maximum shift distance (31 or 63), matching the hardware's implicit
// masking for register counts.
void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
  LocationSummary* locations = ror->GetLocations();
  CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  switch (ror->GetResultType()) {
    case DataType::Type::kInt32:
      if (second.IsRegister()) {
        // Variable count: the location builder pinned it to CL.
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        __ rorl(first_reg, second_reg);
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
        __ rorl(first_reg, imm);
      }
      break;
    case DataType::Type::kInt64:
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        __ rorq(first_reg, second_reg);
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
        __ rorq(first_reg, imm);
      }
      break;
    default:
      LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
      UNREACHABLE();
  }
}
4671
// Shl shares location setup with the other shifts; see HandleShift.
void LocationsBuilderX86_64::VisitShl(HShl* shl) {
  HandleShift(shl);
}
4675
// Shl shares code generation with the other shifts; see HandleShift.
void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
  HandleShift(shl);
}
4679
// Arithmetic shift right: delegates to the common shift location setup.
void LocationsBuilderX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}
4683
// Arithmetic shift right: delegates to the common shift code generator.
void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}
4687
// Logical (unsigned) shift right: delegates to the common shift location setup.
void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
4691
// Logical (unsigned) shift right: delegates to the common shift code generator.
void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
4695
// Object allocation goes to the runtime, so inputs/outputs follow the
// runtime calling convention.
void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  // The class to instantiate goes in the first runtime-call argument register.
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  // The allocated object comes back in RAX.
  locations->SetOut(Location::RegisterLocation(RAX));
}
4703
// Emits the runtime call that allocates the object; the concrete entrypoint
// was chosen earlier and is stored on the instruction.
void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
  codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
  // Verify the entrypoint signature: returns void*, takes the mirror::Class*.
  CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
  DCHECK(!codegen_->IsLeafMethod());
}
4709
// Array allocation goes to the runtime: class and length are passed in the
// runtime-call argument registers, the array comes back in RAX.
void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetOut(Location::RegisterLocation(RAX));
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
}
4718
// Emits the runtime call that allocates the array; the entrypoint is picked
// per-instruction by the shared CodeGenerator helper.
void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care of poisoning the reference.
  QuickEntrypointEnum entrypoint = CodeGenerator::GetArrayAllocationEntrypoint(instruction);
  codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
  // Signature check: void* fn(mirror::Class*, int32_t length).
  CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
  DCHECK(!codegen_->IsLeafMethod());
}
4726
// A parameter already lives where the calling convention put it; stack-passed
// parameters are addressed relative to the caller's frame, so their slot
// index must be rebased by this method's frame size.
void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}
4738
// No code is emitted: the locations pass already pinned the parameter to the
// spot the calling convention delivered it in.
void InstructionCodeGeneratorX86_64::VisitParameterValue(
    HParameterValue* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}
4743
// The current ArtMethod* is pinned to the dedicated method register.
void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}
4749
// No code is emitted: the method register already holds the ArtMethod*.
void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
    HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
4754
// Loading a method pointer out of a class's vtable/IMT only needs the class
// register in and a register out.
void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}
4761
// Loads a method pointer from the class in InAt(0) into Out.
// - vtable: one load, directly from the vtable entry embedded in the class.
// - IMT: two loads — first the class's IMT pointer, then the entry in it.
void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    // Byte offset of the vtable entry inside the class object itself.
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kX86_64PointerSize).SizeValue();
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
  } else {
    // Offset of the entry inside the (separately allocated) ImTable.
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kX86_64PointerSize));
    // out = class->imt_ptr_
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(),
            mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
    // out = out[method_offset]
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->Out().AsRegister<CpuRegister>(), method_offset));
  }
}
4779
// Bitwise not is performed in place, so the output aliases the input.
void LocationsBuilderX86_64::VisitNot(HNot* not_) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(not_, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}
4786
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004787void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
4788 LocationSummary* locations = not_->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004789 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4790 locations->Out().AsRegister<CpuRegister>().AsRegister());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004791 Location out = locations->Out();
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004792 switch (not_->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004793 case DataType::Type::kInt32:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004794 __ notl(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004795 break;
4796
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004797 case DataType::Type::kInt64:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004798 __ notq(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004799 break;
4800
4801 default:
4802 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
4803 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004804}
4805
// Boolean not is computed in place, so the output aliases the input.
void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(bool_not, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}
4812
// Flips a boolean with `xor 1`: 0 <-> 1 (assumes the value is already 0/1).
void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
  LocationSummary* locations = bool_not->GetLocations();
  // The xor is destructive, so input and output must share a register.
  DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
            locations->Out().AsRegister<CpuRegister>().AsRegister());
  Location out = locations->Out();
  __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
}
4820
// Phis place no constraints: the register allocator resolves them, so every
// input and the output may live anywhere.
void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}
4829
// Phis never reach code generation (they are resolved by earlier passes),
// so hitting this visitor is a fatal compiler error.
void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented";
}
4833
// Emits (or elides) a memory barrier of the requested kind for x86-64.
void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
  /*
   * According to the JSR-133 Cookbook, for x86-64 only StoreLoad/AnyAny barriers need memory fence.
   * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
   * For those cases, all we need to ensure is that there is a scheduling barrier in place.
   */
  switch (kind) {
    case MemBarrierKind::kAnyAny: {
      // Full StoreLoad barrier: requires an actual fence instruction.
      MemoryFence();
      break;
    }
    case MemBarrierKind::kAnyStore:
    case MemBarrierKind::kLoadAny:
    case MemBarrierKind::kStoreStore: {
      // nop
      break;
    }
    case MemBarrierKind::kNTStoreStore:
      // Non-Temporal Store/Store needs an explicit fence.
      MemoryFence(/* non-temporal= */ true);
      break;
  }
}
4857
// Location setup shared by instance and static field gets.  Reference loads
// under a read barrier may branch to a slow path, which shapes the summary.
void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction,
                                                       object_field_get_with_read_barrier
                                                           ? LocationSummary::kCallOnSlowPath
                                                           : LocationSummary::kNoCall);
  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  // InAt(0) is the object (or the Class for a static get) holding the field.
  locations->SetInAt(0, Location::RequiresRegister());
  if (DataType::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    // The output overlaps for an object field get when read barriers
    // are enabled: we do not want the move to overwrite the object's
    // location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}
4883
// Emits the load for an instance or static field get.  Width/signedness picks
// the mov variant; reference loads may go through a read barrier.  Implicit
// null checks and volatile LoadAny barriers are recorded/emitted after the
// load — except for references, where the switch handles both itself.
void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Location base_loc = locations->InAt(0);
  CpuRegister base = base_loc.AsRegister<CpuRegister>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  // The field's declared type and the HIR result type must agree in size.
  DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
  DataType::Type load_type = instruction->GetType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (load_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8: {
      // Zero-extending byte load.
      __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kInt8: {
      // Sign-extending byte load.
      __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kUint16: {
      // Zero-extending 16-bit load.
      __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kInt16: {
      // Sign-extending 16-bit load.
      __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kInt32: {
      __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kReference: {
      // /* HeapReference<Object> */ out = *(base + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            instruction, out, base, offset, /* needs_null_check= */ true);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
      }
      break;
    }

    case DataType::Type::kInt64: {
      __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kFloat32: {
      __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kFloat64: {
      __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << load_type;
      UNREACHABLE();
  }

  if (load_type == DataType::Type::kReference) {
    // Potential implicit null checks, in the case of reference
    // fields, are handled in the previous switch statement.
  } else {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (is_volatile) {
    if (load_type == DataType::Type::kReference) {
      // Memory barriers, in the case of references, are also handled
      // in the previous switch statement.
    } else {
      codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
    }
  }
}
4986
// Location setup shared by instance and static field sets.  Volatile stores
// must be a single instruction, which restricts how constants may be encoded;
// reference stores may need temps for the write barrier or heap poisoning.
void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
                                            const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  DataType::Type field_type = field_info.GetFieldType();
  bool is_volatile = field_info.IsVolatile();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));

  // InAt(0) is the object (or Class) being stored into; InAt(1) the value.
  locations->SetInAt(0, Location::RequiresRegister());
  if (DataType::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
    if (is_volatile) {
      // In order to satisfy the semantics of volatile, this must be a single instruction store.
      locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
    } else {
      locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
    }
  } else {
    if (is_volatile) {
      // In order to satisfy the semantics of volatile, this must be a single instruction store.
      locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
    } else {
      locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
    }
  }
  if (needs_write_barrier) {
    // Temporary registers for the write barrier.
    locations->AddTemp(Location::RequiresRegister());  // Possibly used for reference poisoning too.
    locations->AddTemp(Location::RequiresRegister());
  } else if (kPoisonHeapReferences && field_type == DataType::Type::kReference) {
    // Temporary register for the reference poisoning.
    locations->AddTemp(Location::RequiresRegister());
  }
}
5023
// Emits the store for an instance or static field set.  Volatile stores are
// bracketed by an AnyStore barrier before and an AnyAny barrier after.  After
// the store: record the implicit null check (unless MoveInt64ToAddress already
// did), then mark the GC card if the stored value needs a write barrier.
void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
                                                    const FieldInfo& field_info,
                                                    bool value_can_be_null) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
  Location value = locations->InAt(1);
  bool is_volatile = field_info.IsVolatile();
  DataType::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  if (is_volatile) {
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  // Set when the 64-bit constant path handles the null-check recording itself.
  bool maybe_record_implicit_null_check_done = false;

  switch (field_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8: {
      if (value.IsConstant()) {
        __ movb(Address(base, offset),
                Immediate(CodeGenerator::GetInt8ValueOf(value.GetConstant())));
      } else {
        __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case DataType::Type::kUint16:
    case DataType::Type::kInt16: {
      if (value.IsConstant()) {
        __ movw(Address(base, offset),
                Immediate(CodeGenerator::GetInt16ValueOf(value.GetConstant())));
      } else {
        __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case DataType::Type::kInt32:
    case DataType::Type::kReference: {
      if (value.IsConstant()) {
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        // `field_type == DataType::Type::kReference` implies `v == 0`.
        DCHECK((field_type != DataType::Type::kReference) || (v == 0));
        // Note: if heap poisoning is enabled, no need to poison
        // (negate) `v` if it is a reference, as it would be null.
        __ movl(Address(base, offset), Immediate(v));
      } else {
        if (kPoisonHeapReferences && field_type == DataType::Type::kReference) {
          // Poison into a temp so the original reference stays intact.
          CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
          __ movl(temp, value.AsRegister<CpuRegister>());
          __ PoisonHeapReference(temp);
          __ movl(Address(base, offset), temp);
        } else {
          __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
        }
      }
      break;
    }

    case DataType::Type::kInt64: {
      if (value.IsConstant()) {
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        // May emit one or two stores depending on the immediate; it records
        // the implicit null check itself, hence the flag below.
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case DataType::Type::kFloat32: {
      if (value.IsConstant()) {
        // Store the float's bit pattern as a 32-bit immediate.
        int32_t v =
            bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(Address(base, offset), Immediate(v));
      } else {
        __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case DataType::Type::kFloat64: {
      if (value.IsConstant()) {
        // Store the double's bit pattern; same one-or-two-store helper as kInt64.
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  if (!maybe_record_implicit_null_check_done) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
    CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
    codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
  }

  if (is_volatile) {
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
5149
// Instance field set: shared location setup in HandleFieldSet.
void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
5153
// Instance field set: shared code generation in HandleFieldSet.
void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
5157
// Instance field get: shared location setup in HandleFieldGet.
void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction);
}
5161
// Instance field get: shared code generation in HandleFieldGet.
void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005165
// Static field get: shared location setup in HandleFieldGet.
void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction);
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005169
// Static field get: shared code generation in HandleFieldGet.
void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005173
// Static field set: shared location setup in HandleFieldSet.
void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005177
// Static field set: shared code generation in HandleFieldSet.
void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
5181
// Register allocation for the StringBuilder-append intrinsic; the result
// (the built String) is pinned to RAX, the x86-64 return register.
void LocationsBuilderX86_64::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
  codegen_->CreateStringBuilderAppendLocations(instruction, Location::RegisterLocation(RAX));
}
5185
// Emits the StringBuilder-append runtime call. The format descriptor is
// materialized in RDI — presumably the register the kQuickStringBuilderAppend
// entrypoint expects it in (contract defined with the runtime stub; confirm
// against the entrypoint implementation).
void InstructionCodeGeneratorX86_64::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
  __ movl(CpuRegister(RDI), Immediate(instruction->GetFormat()->GetValue()));
  codegen_->InvokeRuntime(kQuickStringBuilderAppend, instruction, instruction->GetDexPc());
}
5190
// Unresolved instance field load: the field could not be resolved at compile
// time, so locations follow the field-access runtime calling convention.
void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
5197
// Emits the runtime call that performs an unresolved instance field load,
// keyed by field index and dex PC.
void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
5207
// Unresolved instance field store: locations per the field-access runtime
// calling convention (same scheme as the unresolved get).
void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
5214
// Emits the runtime call that performs an unresolved instance field store.
void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
5224
// Unresolved static field load: locations per the field-access runtime
// calling convention.
void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
5231
// Emits the runtime call that performs an unresolved static field load.
void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
5241
// Unresolved static field store: locations per the field-access runtime
// calling convention.
void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
5248
// Emits the runtime call that performs an unresolved static field store.
void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
5258
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005259void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005260 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
5261 Location loc = codegen_->GetCompilerOptions().GetImplicitNullChecks()
5262 ? Location::RequiresRegister()
5263 : Location::Any();
5264 locations->SetInAt(0, loc);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005265}
5266
// Emits an implicit (fault-based) null check: a load through the object that
// faults when the object is null. The fault is presumably converted into a
// NullPointerException by the runtime's fault handler (handled outside this
// file); here we only emit the access and record the PC mapping.
void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
  // If a later user of the value dereferences it anyway, that access can
  // serve as the null check and nothing needs to be emitted here.
  if (CanMoveNullCheckToUser(instruction)) {
    return;
  }
  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  // `testl reg, mem` reads from [obj + 0] without writing any register;
  // RAX is just an arbitrary register operand for the encoding.
  __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
  // Associate the potentially faulting native PC with this dex PC.
  RecordPcInfo(instruction, instruction->GetDexPc());
}
5277
// Emits an explicit null check: compare the object against null and branch to
// a throwing slow path when it is null.
void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCode* slow_path = new (GetScopedAllocator()) NullCheckSlowPathX86_64(instruction);
  AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  if (obj.IsRegister()) {
    // test reg, reg sets ZF exactly when the reference is null.
    __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
  } else if (obj.IsStackSlot()) {
    __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
  } else {
    // Remaining case: a constant, which must be the null constant — the check
    // statically fails, so jump unconditionally to the slow path.
    DCHECK(obj.IsConstant()) << obj;
    DCHECK(obj.GetConstant()->IsNullConstant());
    __ jmp(slow_path->GetEntryLabel());
    return;
  }
  // Taken when the comparison above found a null reference.
  __ j(kEqual, slow_path->GetEntryLabel());
}
5297
// Dispatches to the code generator, which chooses between the implicit and
// explicit strategies (see GenerateImplicitNullCheck / GenerateExplicitNullCheck).
void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
  codegen_->GenerateNullCheck(instruction);
}
5301
// Register allocation for an array element load. Reference loads under a
// read barrier may call a slow path and need special output-overlap handling.
void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction,
                                                       object_array_get_with_read_barrier
                                                           ? LocationSummary::kCallOnSlowPath
                                                           : LocationSummary::kNoCall);
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());           // Array reference.
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));  // Index.
  if (DataType::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps for an object array get when read barriers
    // are enabled: we do not want the move to overwrite the array's
    // location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}
5326
// Emits code for an array element load, dispatching on the element type.
// Each case emits a single width-appropriate load; references additionally
// go through the read-barrier machinery. Implicit null checks are recorded
// right after the first access of the array object.
void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location index = locations->InAt(1);
  Location out_loc = locations->Out();
  uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);

  DataType::Type type = instruction->GetType();
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8: {
      // Zero-extending byte load.
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
      break;
    }

    case DataType::Type::kInt8: {
      // Sign-extending byte load.
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movsxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
      break;
    }

    case DataType::Type::kUint16: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
        // Branch cases into compressed and uncompressed for each index's type.
        uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
        NearLabel done, not_compressed;
        // Bit 0 of the count field encodes the compression state.
        __ testb(Address(obj, count_offset), Immediate(1));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                      "Expecting 0=compressed, 1=uncompressed");
        __ j(kNotZero, &not_compressed);
        // Compressed: one byte per character.
        __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
        __ jmp(&done);
        __ Bind(&not_compressed);
        // Uncompressed: two bytes per character.
        __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
        __ Bind(&done);
      } else {
        __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
      }
      break;
    }

    case DataType::Type::kInt16: {
      // Sign-extending 16-bit load.
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movsxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
      break;
    }

    case DataType::Type::kInt32: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
      break;
    }

    case DataType::Type::kReference: {
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      // /* HeapReference<Object> */ out =
      //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
        codegen_->GenerateArrayLoadWithBakerReadBarrier(
            instruction, out_loc, obj, data_offset, index, /* needs_null_check= */ true);
      } else {
        CpuRegister out = out_loc.AsRegister<CpuRegister>();
        __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        if (index.IsConstant()) {
          uint32_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
        } else {
          codegen_->MaybeGenerateReadBarrierSlow(
              instruction, out_loc, out_loc, obj_loc, data_offset, index);
        }
      }
      break;
    }

    case DataType::Type::kInt64: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movq(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
      break;
    }

    case DataType::Type::kFloat32: {
      XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
      __ movss(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
      break;
    }

    case DataType::Type::kFloat64: {
      XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
      __ movsd(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  if (type == DataType::Type::kReference) {
    // Potential implicit null checks, in the case of reference
    // arrays, are handled in the previous switch statement.
  } else {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
}
5446
5447void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005448 DataType::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005449
5450 bool needs_write_barrier =
5451 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005452 bool needs_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005453
Vladimir Markoca6fff82017-10-03 14:49:14 +01005454 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005455 instruction,
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005456 needs_type_check ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005457
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005458 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04005459 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005460 if (DataType::IsFloatingPointType(value_type)) {
Mark Mendellea5af682015-10-22 17:35:49 -04005461 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005462 } else {
5463 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
5464 }
5465
5466 if (needs_write_barrier) {
5467 // Temporary registers for the write barrier.
Roland Levillain16d9f942016-08-25 17:27:56 +01005468 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005469 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005470 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005471}
5472
// Emits code for an array element store, dispatching on component type.
// Primitive stores are a single width-appropriate move. Reference stores
// additionally handle: null stores (no barrier/check needed), an optional
// runtime type check (slow path), the GC card-table write barrier, and
// heap-reference poisoning. The order of emitted instructions here is
// load-bearing (implicit null-check recording must directly follow the
// first array access on each path).
void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location array_loc = locations->InAt(0);
  CpuRegister array = array_loc.AsRegister<CpuRegister>();
  Location index = locations->InAt(1);
  Location value = locations->InAt(2);
  DataType::Type value_type = instruction->GetComponentType();
  bool needs_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  switch (value_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8: {
      // 8-bit store, from register or immediate.
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_1, offset);
      if (value.IsRegister()) {
        __ movb(address, value.AsRegister<CpuRegister>());
      } else {
        __ movb(address, Immediate(CodeGenerator::GetInt8ValueOf(value.GetConstant())));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kUint16:
    case DataType::Type::kInt16: {
      // 16-bit store, from register or immediate.
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_2, offset);
      if (value.IsRegister()) {
        __ movw(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        __ movw(address, Immediate(CodeGenerator::GetInt16ValueOf(value.GetConstant())));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kReference: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);

      if (!value.IsRegister()) {
        // Just setting null.
        DCHECK(instruction->InputAt(2)->IsNullConstant());
        DCHECK(value.IsConstant()) << value;
        __ movl(address, Immediate(0));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        DCHECK(!needs_write_barrier);
        DCHECK(!needs_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      CpuRegister register_value = value.AsRegister<CpuRegister>();
      Location temp_loc = locations->GetTemp(0);
      CpuRegister temp = temp_loc.AsRegister<CpuRegister>();

      // A null value needs neither the type check nor the write barrier;
      // skip straight to the store.
      bool can_value_be_null = instruction->GetValueCanBeNull();
      NearLabel do_store;
      if (can_value_be_null) {
        __ testl(register_value, register_value);
        __ j(kEqual, &do_store);
      }

      SlowPathCode* slow_path = nullptr;
      if (needs_type_check) {
        slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathX86_64(instruction);
        codegen_->AddSlowPath(slow_path);

        const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
        const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
        const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();

        // Note that when Baker read barriers are enabled, the type
        // checks are performed without read barriers. This is fine,
        // even in the case where a class object is in the from-space
        // after the flip, as a comparison involving such a type would
        // not produce a false positive; it may of course produce a
        // false negative, in which case we would take the ArraySet
        // slow path.

        // /* HeapReference<Class> */ temp = array->klass_
        __ movl(temp, Address(array, class_offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ MaybeUnpoisonHeapReference(temp);

        // /* HeapReference<Class> */ temp = temp->component_type_
        __ movl(temp, Address(temp, component_offset));
        // If heap poisoning is enabled, no need to unpoison `temp`
        // nor the object reference in `register_value->klass`, as
        // we are comparing two poisoned references.
        __ cmpl(temp, Address(register_value, class_offset));

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          // Exact-class mismatch may still be fine if the array is Object[]:
          // check the component type's superclass against null.
          NearLabel do_put;
          __ j(kEqual, &do_put);
          // If heap poisoning is enabled, the `temp` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp);

          // If heap poisoning is enabled, no need to unpoison the
          // heap reference loaded below, as it is only used for a
          // comparison with null.
          __ cmpl(Address(temp, super_offset), Immediate(0));
          __ j(kNotEqual, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ j(kNotEqual, slow_path->GetEntryLabel());
        }
      }

      // Card-table write barrier; the null case jumped past it, hence
      // value_can_be_null is false here.
      CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
      codegen_->MarkGCCard(
          temp, card, array, value.AsRegister<CpuRegister>(), /* value_can_be_null= */ false);

      if (can_value_be_null) {
        DCHECK(do_store.IsLinked());
        __ Bind(&do_store);
      }

      // Store the (possibly poisoned) reference.
      Location source = value;
      if (kPoisonHeapReferences) {
        __ movl(temp, register_value);
        __ PoisonHeapReference(temp);
        source = temp_loc;
      }

      __ movl(address, source.AsRegister<CpuRegister>());

      if (can_value_be_null || !needs_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }

      break;
    }

    case DataType::Type::kInt32: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsRegister()) {
        __ movl(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kInt64: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsRegister()) {
        __ movq(address, value.AsRegister<CpuRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        // 64-bit immediates may not fit one instruction; the helper emits
        // one or two moves (low/high halves) and records the null check.
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case DataType::Type::kFloat32: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsFpuRegister()) {
        __ movss(address, value.AsFpuRegister<XmmRegister>());
      } else {
        DCHECK(value.IsConstant());
        // Store the float's bit pattern as a 32-bit immediate.
        int32_t v = bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kFloat64: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsFpuRegister()) {
        __ movsd(address, value.AsFpuRegister<XmmRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        // Store the double's bit pattern via the 64-bit immediate helper.
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
5682
5683void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005684 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005685 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005686 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04005687 if (!instruction->IsEmittedAtUseSite()) {
5688 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5689 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005690}
5691
5692void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
Mark Mendellee8d9712016-07-12 11:13:15 -04005693 if (instruction->IsEmittedAtUseSite()) {
5694 return;
5695 }
5696
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005697 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01005698 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00005699 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
5700 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005701 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005702 codegen_->MaybeRecordImplicitNullCheck(instruction);
jessicahandojo4877b792016-09-08 19:49:13 -07005703 // Mask out most significant bit in case the array is String's array of char.
5704 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01005705 __ shrl(out, Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07005706 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005707}
5708
// Builds locations for a bounds check. The check runs inline; only the
// throwing slow path calls the runtime, so just the first two runtime
// argument registers are recorded as caller-saves for it.
void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
  RegisterSet caller_saves = RegisterSet::Empty();
  InvokeRuntimeCallingConvention calling_convention;
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
  // The index may be kept in a register or used as an immediate.
  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
  HInstruction* length = instruction->InputAt(1);
  // If the length is emitted at use site (an ArrayLength read from memory by
  // the check itself), it needs no input location of its own.
  if (!length->IsEmittedAtUseSite()) {
    locations->SetInAt(1, Location::RegisterOrConstant(length));
  }
}
5721
// Emits the bounds check `index < length` (with an unsigned comparison, so a
// negative index also fails) and branches to a throwing slow path otherwise.
// Handles constant/register indices and constant/register/in-memory lengths.
void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location index_loc = locations->InAt(0);
  Location length_loc = locations->InAt(1);
  SlowPathCode* slow_path =
      new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathX86_64(instruction);

  if (length_loc.IsConstant()) {
    int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
    if (index_loc.IsConstant()) {
      // BCE will remove the bounds check if we are guaranteed to pass.
      int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
      if (index < 0 || index >= length) {
        // Statically known to fail: jump unconditionally to the slow path.
        codegen_->AddSlowPath(slow_path);
        __ jmp(slow_path->GetEntryLabel());
      } else {
        // Some optimization after BCE may have generated this, and we should not
        // generate a bounds check if it is a valid range.
      }
      return;
    }

    // We have to reverse the jump condition because the length is the constant.
    CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
    __ cmpl(index_reg, Immediate(length));
    codegen_->AddSlowPath(slow_path);
    // Unsigned "above or equal" also catches a negative index.
    __ j(kAboveEqual, slow_path->GetEntryLabel());
  } else {
    HInstruction* array_length = instruction->InputAt(1);
    if (array_length->IsEmittedAtUseSite()) {
      // Address the length field in the array.
      DCHECK(array_length->IsArrayLength());
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
        // TODO: if index_loc.IsConstant(), compare twice the index (to compensate for
        // the string compression flag) with the in-memory length and avoid the temporary.
        CpuRegister length_reg = CpuRegister(TMP);
        // Load the length; the load doubles as the implicit null check site.
        __ movl(length_reg, array_len);
        codegen_->MaybeRecordImplicitNullCheck(array_length);
        // Shift out the compression flag bit to get the character count.
        __ shrl(length_reg, Immediate(1));
        codegen_->GenerateIntCompare(length_reg, index_loc);
      } else {
        // Checking the bound for general case:
        // Array of char or String's array when the compression feature off.
        if (index_loc.IsConstant()) {
          int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
          __ cmpl(array_len, Immediate(value));
        } else {
          __ cmpl(array_len, index_loc.AsRegister<CpuRegister>());
        }
        // The memory operand in the compare doubles as the implicit null check.
        codegen_->MaybeRecordImplicitNullCheck(array_length);
      }
    } else {
      codegen_->GenerateIntCompare(length_loc, index_loc);
    }
    codegen_->AddSlowPath(slow_path);
    // Length was the first operand, so "length <= index" (unsigned) fails.
    __ j(kBelowEqual, slow_path->GetEntryLabel());
  }
}
5783
// Marks the GC card table entry covering `object` as dirty, recording that
// `object` may now hold a reference to `value`. When `value_can_be_null`,
// the marking is skipped entirely for a null `value`. Clobbers `temp` and
// `card`.
void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
                                     CpuRegister card,
                                     CpuRegister object,
                                     CpuRegister value,
                                     bool value_can_be_null) {
  NearLabel is_null;
  if (value_can_be_null) {
    // Null stores do not create a reference, so no card needs dirtying.
    __ testl(value, value);
    __ j(kEqual, &is_null);
  }
  // Load the address of the card table into `card`.
  __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64PointerSize>().Int32Value(),
                                        /* no_rip= */ true));
  // Calculate the offset (in the card table) of the card corresponding to
  // `object`.
  __ movq(temp, object);
  __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
  // Write the `art::gc::accounting::CardTable::kCardDirty` value into the
  // `object`'s card.
  //
  // Register `card` contains the address of the card table. Note that the card
  // table's base is biased during its creation so that it always starts at an
  // address whose least-significant byte is equal to `kCardDirty` (see
  // art::gc::accounting::CardTable::Create). Therefore the MOVB instruction
  // below writes the `kCardDirty` (byte) value into the `object`'s card
  // (located at `card + object >> kCardShift`).
  //
  // This dual use of the value in register `card` (1. to calculate the location
  // of the card to mark; and 2. to load the `kCardDirty` value) saves a load
  // (no need to explicitly load `kCardDirty` as an immediate value).
  __ movb(Address(temp, card, TIMES_1, 0), card);
  if (value_can_be_null) {
    __ Bind(&is_null);
  }
}
5819
// Parallel moves carry no locations of their own; reaching this visitor
// indicates a bug in the pipeline.
void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented";
}
5823
5824void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
Vladimir Markobea75ff2017-10-11 20:39:54 +01005825 if (instruction->GetNext()->IsSuspendCheck() &&
5826 instruction->GetBlock()->GetLoopInformation() != nullptr) {
5827 HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
5828 // The back edge will generate the suspend check.
5829 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
5830 }
5831
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005832 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5833}
5834
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005835void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005836 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5837 instruction, LocationSummary::kCallOnSlowPath);
Aart Bikb13c65b2017-03-21 20:14:07 -07005838 // In suspend check slow path, usually there are no caller-save registers at all.
5839 // If SIMD instructions are present, however, we force spilling all live SIMD
5840 // registers in full width (since the runtime only saves/restores lower part).
Aart Bik5576f372017-03-23 16:17:37 -07005841 locations->SetCustomSlowPathCallerSaves(
5842 GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005843}
5844
5845void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005846 HBasicBlock* block = instruction->GetBlock();
5847 if (block->GetLoopInformation() != nullptr) {
5848 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5849 // The back edge will generate the suspend check.
5850 return;
5851 }
5852 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5853 // The goto will generate the suspend check.
5854 return;
5855 }
5856 GenerateSuspendCheck(instruction, nullptr);
5857}
5858
// Emits a suspend check: tests the thread's flags word and diverts to a
// (lazily created, cached on the instruction) slow path when any flag is set.
// With a non-null `successor` (loop back-edge case), the fast path jumps
// straight to the successor; otherwise execution resumes at the slow path's
// return label.
void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  // Reuse the slow path if one was already created for this check.
  SuspendCheckSlowPathX86_64* slow_path =
      down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    slow_path =
        new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathX86_64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      // A non-null successor is only used for the back-edge case.
      DCHECK(successor->IsLoopHeader());
    }
  } else {
    // A cached slow path must have been created for the same successor.
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  // Compare the thread flags (via the GS-based thread register) against zero.
  __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64PointerSize>().Int32Value(),
                                  /* no_rip= */ true),
                Immediate(0));
  if (successor == nullptr) {
    // Standalone check: enter the slow path on a set flag, resume right here.
    __ j(kNotEqual, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    // Back edge: fall through to the successor when no flag is set, otherwise
    // jump into the slow path.
    __ j(kEqual, codegen_->GetLabelOf(successor));
    __ jmp(slow_path->GetEntryLabel());
  }
}
5886
// The move resolver shares the code generator's assembler so that resolved
// moves are emitted into the same instruction stream.
X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
  return codegen_->GetAssembler();
}
5890
// Emits the pending move at `index`. Dispatches on the (source, destination)
// location kinds: core/XMM registers, 32-/64-bit/SIMD stack slots, and
// constants. Memory-to-memory moves stage their value through TMP.
void ParallelMoveResolverX86_64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    // Core register source: full 64-bit register-to-register copy, or a
    // 32-/64-bit store depending on the destination slot width.
    if (destination.IsRegister()) {
      __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
    } else if (destination.IsStackSlot()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else {
      DCHECK(destination.IsDoubleStackSlot());
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    }
  } else if (source.IsStackSlot()) {
    // 32-bit stack slot source.
    if (destination.IsRegister()) {
      __ movl(destination.AsRegister<CpuRegister>(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (destination.IsFpuRegister()) {
      __ movss(destination.AsFpuRegister<XmmRegister>(),
               Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      // Slot-to-slot move goes through TMP.
      DCHECK(destination.IsStackSlot());
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (source.IsDoubleStackSlot()) {
    // 64-bit stack slot source.
    if (destination.IsRegister()) {
      __ movq(destination.AsRegister<CpuRegister>(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (destination.IsFpuRegister()) {
      __ movsd(destination.AsFpuRegister<XmmRegister>(),
               Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (source.IsSIMDStackSlot()) {
    // 128-bit SIMD stack slot source.
    if (destination.IsFpuRegister()) {
      __ movups(destination.AsFpuRegister<XmmRegister>(),
                Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      // Slot-to-slot: copy the two 64-bit halves through TMP.
      DCHECK(destination.IsSIMDStackSlot());
      size_t high = kX86_64WordSize;
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex() + high));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex() + high), CpuRegister(TMP));
    }
  } else if (source.IsConstant()) {
    HConstant* constant = source.GetConstant();
    if (constant->IsIntConstant() || constant->IsNullConstant()) {
      int32_t value = CodeGenerator::GetInt32ValueOf(constant);
      if (destination.IsRegister()) {
        if (value == 0) {
          // xor is a shorter encoding for materializing zero.
          __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
        } else {
          __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
        }
      } else {
        DCHECK(destination.IsStackSlot()) << destination;
        __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
      }
    } else if (constant->IsLongConstant()) {
      int64_t value = constant->AsLongConstant()->GetValue();
      if (destination.IsRegister()) {
        codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        codegen_->Store64BitValueToStack(destination, value);
      }
    } else if (constant->IsFloatConstant()) {
      float fp_value = constant->AsFloatConstant()->GetValue();
      if (destination.IsFpuRegister()) {
        XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
        codegen_->Load32BitValue(dest, fp_value);
      } else {
        DCHECK(destination.IsStackSlot()) << destination;
        // Store the raw bit pattern of the float as a 32-bit immediate.
        Immediate imm(bit_cast<int32_t, float>(fp_value));
        __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
      }
    } else {
      DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
      double fp_value = constant->AsDoubleConstant()->GetValue();
      int64_t value = bit_cast<int64_t, double>(fp_value);
      if (destination.IsFpuRegister()) {
        XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
        codegen_->Load64BitValue(dest, fp_value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        codegen_->Store64BitValueToStack(destination, value);
      }
    }
  } else if (source.IsFpuRegister()) {
    // XMM register source: register copy or 32/64/128-bit store depending on
    // the destination slot width.
    if (destination.IsFpuRegister()) {
      __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
    } else if (destination.IsStackSlot()) {
      __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else if (destination.IsDoubleStackSlot()) {
      __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else {
      DCHECK(destination.IsSIMDStackSlot());
      __ movups(Address(CpuRegister(RSP), destination.GetStackIndex()),
                source.AsFpuRegister<XmmRegister>());
    }
  }
}
6003
// Swaps the 32-bit value in `reg` with the stack slot at RSP+`mem`, staging
// the old slot value through TMP.
void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movl(Address(CpuRegister(RSP), mem), reg);
  __ movl(reg, CpuRegister(TMP));
}
6009
// Swaps two 64-bit core registers through TMP.
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
  __ movq(CpuRegister(TMP), reg1);
  __ movq(reg1, reg2);
  __ movq(reg2, CpuRegister(TMP));
}
6015
// Swaps the 64-bit value in `reg` with the stack slot at RSP+`mem`, staging
// the old slot value through TMP.
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movq(Address(CpuRegister(RSP), mem), reg);
  __ movq(reg, CpuRegister(TMP));
}
6021
// Swaps a 32-bit value between XMM register `reg` and the stack slot at
// RSP+`mem`; the old slot value travels through the core TMP register.
void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movss(Address(CpuRegister(RSP), mem), reg);
  __ movd(reg, CpuRegister(TMP));
}
6027
// Swaps a 64-bit value between XMM register `reg` and the stack slot at
// RSP+`mem`; the old slot value travels through the core TMP register.
// NOTE(review): relies on the assembler's movd overload transferring all
// 64 bits of TMP here — confirm against the x86_64 assembler.
void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movsd(Address(CpuRegister(RSP), mem), reg);
  __ movd(reg, CpuRegister(TMP));
}
6033
// Swaps the 128-bit XMM register `reg` with the SIMD stack slot at `mem`.
// Temporarily extends the stack by two words to park the register's value,
// swaps that scratch area with the slot (rebasing `mem` by the adjustment),
// then reloads the register and restores RSP.
void ParallelMoveResolverX86_64::Exchange128(XmmRegister reg, int mem) {
  size_t extra_slot = 2 * kX86_64WordSize;
  __ subq(CpuRegister(RSP), Immediate(extra_slot));
  __ movups(Address(CpuRegister(RSP), 0), XmmRegister(reg));
  // `mem` must be rebased because RSP just moved down by `extra_slot`.
  ExchangeMemory64(0, mem + extra_slot, 2);
  __ movups(XmmRegister(reg), Address(CpuRegister(RSP), 0));
  __ addq(CpuRegister(RSP), Immediate(extra_slot));
}
6042
// Swaps the 32-bit contents of the two stack slots at `mem1` and `mem2`,
// using TMP plus one additional core scratch register. If the scratch
// register had to be spilled (pushed), all slot offsets are rebased by one
// word to account for the moved RSP.
void ParallelMoveResolverX86_64::ExchangeMemory32(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
  __ movl(CpuRegister(ensure_scratch.GetRegister()),
          Address(CpuRegister(RSP), mem2 + stack_offset));
  __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
  __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
          CpuRegister(ensure_scratch.GetRegister()));
}
6055
// Swaps `num_of_qwords` 64-bit words between the stack regions starting at
// `mem1` and `mem2`, one qword per iteration. Scratch handling (TMP plus one
// possibly-spilled core register, with offsets rebased on spill) matches
// ExchangeMemory32.
void ParallelMoveResolverX86_64::ExchangeMemory64(int mem1, int mem2, int num_of_qwords) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;

  // Now that temp registers are available (possibly spilled), exchange blocks of memory.
  for (int i = 0; i < num_of_qwords; i++) {
    __ movq(CpuRegister(TMP),
            Address(CpuRegister(RSP), mem1 + stack_offset));
    __ movq(CpuRegister(ensure_scratch.GetRegister()),
            Address(CpuRegister(RSP), mem2 + stack_offset));
    __ movq(Address(CpuRegister(RSP), mem2 + stack_offset),
            CpuRegister(TMP));
    __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
            CpuRegister(ensure_scratch.GetRegister()));
    // Advance to the next qword of both regions.
    stack_offset += kX86_64WordSize;
  }
}
6075
// Emits code exchanging the source and destination of the move at `index`.
// Register<->register swaps stage through TMP; register<->memory swaps use
// the ExchangeNN helpers; memory<->memory swaps use the ExchangeMemoryNN
// helpers. Any unlisted combination is a fatal error.
void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    ExchangeMemory32(destination.GetStackIndex(), source.GetStackIndex());
  } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
    Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
    ExchangeMemory64(destination.GetStackIndex(), source.GetStackIndex(), 1);
  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
    // XMM<->XMM: park the source's value in TMP, copy destination over the
    // source (movaps), then restore the parked value into the destination.
    __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
    __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
    __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
  } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
    Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
    Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else if (source.IsSIMDStackSlot() && destination.IsSIMDStackSlot()) {
    // 128-bit slot<->slot swap: two qwords.
    ExchangeMemory64(destination.GetStackIndex(), source.GetStackIndex(), 2);
  } else if (source.IsFpuRegister() && destination.IsSIMDStackSlot()) {
    Exchange128(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (destination.IsFpuRegister() && source.IsSIMDStackSlot()) {
    Exchange128(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else {
    LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
  }
}
6117
6118
// Saves core register `reg` on the stack so it can be used as a scratch
// register (paired with RestoreScratch).
void ParallelMoveResolverX86_64::SpillScratch(int reg) {
  __ pushq(CpuRegister(reg));
}
6122
6123
// Restores core register `reg` spilled by SpillScratch.
void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
  __ popq(CpuRegister(reg));
}
6127
// Emits a class-initialization check on the class in `class_reg`: compares
// the byte of the 32-bit status field that holds the ClassStatus (which sits
// above the SubtypeCheckBits) and branches to `slow_path` when the status is
// below kVisiblyInitialized. Binds the slow path's exit label here so it
// returns to this point.
void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
    SlowPathCode* slow_path, CpuRegister class_reg) {
  // The ClassStatus starts right after the SubtypeCheckBits within the field.
  constexpr size_t status_lsb_position = SubtypeCheckBits::BitStructSizeOf();
  const size_t status_byte_offset =
      mirror::Class::StatusOffset().SizeValue() + (status_lsb_position / kBitsPerByte);
  constexpr uint32_t shifted_visibly_initialized_value =
      enum_cast<uint32_t>(ClassStatus::kVisiblyInitialized) << (status_lsb_position % kBitsPerByte);

  // A single byte compare suffices since the status occupies the top byte.
  __ cmpb(Address(class_reg, status_byte_offset), Immediate(shifted_visibly_initialized_value));
  __ j(kBelow, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
6140
// Compares the bitstring path-to-root stored in the status field of the
// class in `temp` against the check's expected path, leaving the condition
// flags set for the caller's equality branch. Uses a direct 16-bit memory
// compare when the mask spans exactly 16 bits; otherwise subtracts the
// expected value and shifts out the bits not covered by the mask.
void InstructionCodeGeneratorX86_64::GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check,
                                                                       CpuRegister temp) {
  uint32_t path_to_root = check->GetBitstringPathToRoot();
  uint32_t mask = check->GetBitstringMask();
  // The mask must be contiguous low-order bits.
  DCHECK(IsPowerOfTwo(mask + 1));
  size_t mask_bits = WhichPowerOf2(mask + 1);

  if (mask_bits == 16u) {
    // Compare the bitstring in memory.
    __ cmpw(Address(temp, mirror::Class::StatusOffset()), Immediate(path_to_root));
  } else {
    // /* uint32_t */ temp = temp->status_
    __ movl(temp, Address(temp, mirror::Class::StatusOffset()));
    // Compare the bitstring bits using SUB.
    __ subl(temp, Immediate(path_to_root));
    // Shift out bits that do not contribute to the comparison.
    __ shll(temp, Immediate(32u - mask_bits));
  }
}
6160
// Returns the HLoadClass load kind to use on x86-64. Every requested kind is
// supported, so the desired kind is returned unchanged after validating
// compiler-mode invariants: the PC-relative/.bss kinds require AOT
// compilation, while the JIT-specific kinds require the JIT compiler.
HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
    HLoadClass::LoadKind desired_class_load_kind) {
  switch (desired_class_load_kind) {
    case HLoadClass::LoadKind::kInvalid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
    case HLoadClass::LoadKind::kReferrersClass:
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadClass::LoadKind::kBootImageRelRo:
    case HLoadClass::LoadKind::kBssEntry:
    case HLoadClass::LoadKind::kBssEntryPublic:
    case HLoadClass::LoadKind::kBssEntryPackage:
      // These kinds rely on link-time/oat placement and are AOT-only.
      DCHECK(!GetCompilerOptions().IsJitCompiler());
      break;
    case HLoadClass::LoadKind::kJitBootImageAddress:
    case HLoadClass::LoadKind::kJitTableAddress:
      // These kinds embed runtime addresses and are JIT-only.
      DCHECK(GetCompilerOptions().IsJitCompiler());
      break;
    case HLoadClass::LoadKind::kRuntimeCall:
      break;
  }
  return desired_class_load_kind;
}
6185
// Sets up the register constraints for an HLoadClass instruction.
// The runtime-call kind uses a fixed custom calling convention; all other
// kinds produce the class in an arbitrary register chosen by the allocator.
void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    // Custom calling convention: RAX serves as both input and output.
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
        cls,
        Location::RegisterLocation(RAX),
        Location::RegisterLocation(RAX));
    return;
  }
  // Only the public/package .bss entry kinds imply an access check.
  DCHECK_EQ(cls->NeedsAccessCheck(),
            load_kind == HLoadClass::LoadKind::kBssEntryPublic ||
                load_kind == HLoadClass::LoadKind::kBssEntryPackage);

  // A read barrier is needed unless the class is known to live in the boot
  // image; either an environment or a read barrier forces a slow-path call.
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }

  if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
    // kReferrersClass reads the declaring class out of the current ArtMethod,
    // which is passed in as input 0.
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // Rely on the type resolution and/or initialization to save everything.
      locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
    } else {
      // For non-Baker read barrier we have a temp-clobbering call.
    }
  }
}
6222
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006223Label* CodeGeneratorX86_64::NewJitRootClassPatch(const DexFile& dex_file,
Vladimir Marko174b2e22017-10-12 13:34:49 +01006224 dex::TypeIndex type_index,
Nicolas Geoffray5247c082017-01-13 14:17:29 +00006225 Handle<mirror::Class> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01006226 ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006227 // Add a patch entry and return the label.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006228 jit_class_patches_.emplace_back(&dex_file, type_index.index_);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006229 PatchInfo<Label>* info = &jit_class_patches_.back();
6230 return &info->label;
6231}
6232
// Emits code materializing a java.lang.Class reference into the output
// register, dispatching on the load kind chosen at location-building time.
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    // Fully delegated to the shared runtime-call helper.
    codegen_->GenerateLoadClassRuntimeCall(cls);
    return;
  }
  // Only the public/package .bss entry kinds imply an access check.
  DCHECK_EQ(cls->NeedsAccessCheck(),
            load_kind == HLoadClass::LoadKind::kBssEntryPublic ||
                load_kind == HLoadClass::LoadKind::kBssEntryPackage);

  LocationSummary* locations = cls->GetLocations();
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  // Boot-image classes are immovable, so no read barrier is needed for them.
  const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
      ? kWithoutReadBarrier
      : kCompilerReadBarrierOption;
  bool generate_null_check = false;
  switch (load_kind) {
    case HLoadClass::LoadKind::kReferrersClass: {
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
      GenerateGcRootFieldLoad(
          cls,
          out_loc,
          Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()),
          /* fixup_label= */ nullptr,
          read_barrier_option);
      break;
    }
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
      DCHECK(codegen_->GetCompilerOptions().IsBootImage() ||
             codegen_->GetCompilerOptions().IsBootImageExtension());
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      // PC-relative lea through a placeholder offset patched at link time.
      __ leal(out,
              Address::Absolute(CodeGeneratorX86_64::kPlaceholder32BitOffset, /* no_rip= */ false));
      codegen_->RecordBootImageTypePatch(cls);
      break;
    case HLoadClass::LoadKind::kBootImageRelRo: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      // Load the class reference from the boot-image .data.bimg.rel.ro entry.
      __ movl(out,
              Address::Absolute(CodeGeneratorX86_64::kPlaceholder32BitOffset, /* no_rip= */ false));
      codegen_->RecordBootImageRelRoPatch(CodeGenerator::GetBootImageOffset(cls));
      break;
    }
    case HLoadClass::LoadKind::kBssEntry:
    case HLoadClass::LoadKind::kBssEntryPublic:
    case HLoadClass::LoadKind::kBssEntryPackage: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kPlaceholder32BitOffset,
                                          /* no_rip= */ false);
      Label* fixup_label = codegen_->NewTypeBssEntryPatch(cls);
      // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
      // No need for memory fence, thanks to the x86-64 memory model.
      // The .bss slot may still be null (class unresolved); fall through to
      // the slow path check below.
      generate_null_check = true;
      break;
    }
    case HLoadClass::LoadKind::kJitBootImageAddress: {
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      uint32_t address = reinterpret_cast32<uint32_t>(cls->GetClass().Get());
      DCHECK_NE(address, 0u);
      __ movl(out, Immediate(static_cast<int32_t>(address)));  // Zero-extended.
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress: {
      // JIT roots table entry; no_rip=true, the patch is an absolute address.
      Address address = Address::Absolute(CodeGeneratorX86_64::kPlaceholder32BitOffset,
                                          /* no_rip= */ true);
      Label* fixup_label =
          codegen_->NewJitRootClassPatch(cls->GetDexFile(), cls->GetTypeIndex(), cls->GetClass());
      // /* GcRoot<mirror::Class> */ out = *address
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected load kind: " << cls->GetLoadKind();
      UNREACHABLE();
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    // One shared slow path handles both resolution (null .bss entry) and
    // class initialization.
    SlowPathCode* slow_path =
        new (codegen_->GetScopedAllocator()) LoadClassSlowPathX86_64(cls, cls);
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
6331
// Sets up locations for an explicit class-initialization check: the class
// comes in a register, and if the check has uses its result aliases input 0.
void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
  // Rely on the type initialization to save everything we need.
  locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
}
6342
// HLoadMethodHandle is always materialized through a runtime call.
void LocationsBuilderX86_64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
  // Custom calling convention: RAX serves as both input and output.
  Location location = Location::RegisterLocation(RAX);
  CodeGenerator::CreateLoadMethodHandleRuntimeCallLocationSummary(load, location, location);
}
6348
// Emits the runtime call that resolves a MethodHandle constant.
void InstructionCodeGeneratorX86_64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
  codegen_->GenerateLoadMethodHandleRuntimeCall(load);
}
6352
// HLoadMethodType is always materialized through a runtime call.
void LocationsBuilderX86_64::VisitLoadMethodType(HLoadMethodType* load) {
  // Custom calling convention: RAX serves as both input and output.
  Location location = Location::RegisterLocation(RAX);
  CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
}
6358
// Emits the runtime call that resolves a MethodType constant.
void InstructionCodeGeneratorX86_64::VisitLoadMethodType(HLoadMethodType* load) {
  codegen_->GenerateLoadMethodTypeRuntimeCall(load);
}
6362
// Emits the class-initialization check, jumping to a LoadClassSlowPathX86_64
// when the class is not yet initialized.
void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class to not be null.
  SlowPathCode* slow_path =
      new (codegen_->GetScopedAllocator()) LoadClassSlowPathX86_64(check->GetLoadClass(), check);
  codegen_->AddSlowPath(slow_path);
  // Input 0 holds the class reference (see the matching locations builder).
  GenerateClassInitializationCheck(slow_path,
                                   check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
}
6371
// Validates the desired HLoadString kind against the compilation mode.
// x86-64 supports every kind, so the input is returned unchanged; the
// DCHECKs only assert AOT-vs-JIT consistency.
HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
    HLoadString::LoadKind desired_string_load_kind) {
  switch (desired_string_load_kind) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadString::LoadKind::kBootImageRelRo:
    case HLoadString::LoadKind::kBssEntry:
      // Patch-based kinds are AOT-only.
      DCHECK(!GetCompilerOptions().IsJitCompiler());
      break;
    case HLoadString::LoadKind::kJitBootImageAddress:
    case HLoadString::LoadKind::kJitTableAddress:
      // Direct-address kinds are JIT-only.
      DCHECK(GetCompilerOptions().IsJitCompiler());
      break;
    case HLoadString::LoadKind::kRuntimeCall:
      break;
  }
  return desired_string_load_kind;
}
6389
// Sets up register constraints for an HLoadString: a fixed RAX output for the
// runtime-call kind, otherwise any register chosen by the allocator.
void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
  if (load->GetLoadKind() == HLoadString::LoadKind::kRuntimeCall) {
    // Custom calling convention: the result arrives in RAX.
    locations->SetOut(Location::RegisterLocation(RAX));
  } else {
    locations->SetOut(Location::RequiresRegister());
    if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
      if (!kUseReadBarrier || kUseBakerReadBarrier) {
        // Rely on the pResolveString to save everything.
        locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
      } else {
        // For non-Baker read barrier we have a temp-clobbering call.
      }
    }
  }
}
6407
Andreas Gampe8a0128a2016-11-28 07:38:35 -08006408Label* CodeGeneratorX86_64::NewJitRootStringPatch(const DexFile& dex_file,
Vladimir Marko174b2e22017-10-12 13:34:49 +01006409 dex::StringIndex string_index,
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006410 Handle<mirror::String> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01006411 ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006412 // Add a patch entry and return the label.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006413 jit_string_patches_.emplace_back(&dex_file, string_index.index_);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006414 PatchInfo<Label>* info = &jit_string_patches_.back();
6415 return &info->label;
6416}
6417
// Emits code materializing a java.lang.String reference into the output
// register, dispatching on the load kind. Kinds not handled in the switch
// fall through to the pResolveString runtime call.
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
  LocationSummary* locations = load->GetLocations();
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  switch (load->GetLoadKind()) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage() ||
             codegen_->GetCompilerOptions().IsBootImageExtension());
      // PC-relative lea through a placeholder offset patched at link time.
      __ leal(out,
              Address::Absolute(CodeGeneratorX86_64::kPlaceholder32BitOffset, /* no_rip= */ false));
      codegen_->RecordBootImageStringPatch(load);
      return;
    }
    case HLoadString::LoadKind::kBootImageRelRo: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      // Load the string reference from the boot-image .data.bimg.rel.ro entry.
      __ movl(out,
              Address::Absolute(CodeGeneratorX86_64::kPlaceholder32BitOffset, /* no_rip= */ false));
      codegen_->RecordBootImageRelRoPatch(CodeGenerator::GetBootImageOffset(load));
      return;
    }
    case HLoadString::LoadKind::kBssEntry: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kPlaceholder32BitOffset,
                                          /* no_rip= */ false);
      Label* fixup_label = codegen_->NewStringBssEntryPatch(load);
      // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
      // No need for memory fence, thanks to the x86-64 memory model.
      // The .bss slot may still be null (string unresolved): take the slow
      // path to resolve it.
      SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) LoadStringSlowPathX86_64(load);
      codegen_->AddSlowPath(slow_path);
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
    case HLoadString::LoadKind::kJitBootImageAddress: {
      uint32_t address = reinterpret_cast32<uint32_t>(load->GetString().Get());
      DCHECK_NE(address, 0u);
      __ movl(out, Immediate(static_cast<int32_t>(address)));  // Zero-extended.
      return;
    }
    case HLoadString::LoadKind::kJitTableAddress: {
      // JIT roots table entry; no_rip=true, the patch is an absolute address.
      Address address = Address::Absolute(CodeGeneratorX86_64::kPlaceholder32BitOffset,
                                          /* no_rip= */ true);
      Label* fixup_label = codegen_->NewJitRootStringPatch(
          load->GetDexFile(), load->GetStringIndex(), load->GetString());
      // /* GcRoot<mirror::String> */ out = *address
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
      return;
    }
    default:
      break;
  }

  // TODO: Re-add the compiler code to do string dex cache lookup again.
  // Custom calling convention: RAX serves as both input and output.
  __ movl(CpuRegister(RAX), Immediate(load->GetStringIndex().index_));
  codegen_->InvokeRuntime(kQuickResolveString,
                          load,
                          load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}
6482
// Returns the address of the current thread's pending-exception field.
// no_rip=true: callers access it with a GS segment override (see the
// `__ gs()->movl` uses below), not RIP-relative addressing.
static Address GetExceptionTlsAddress() {
  return Address::Absolute(Thread::ExceptionOffset<kX86_64PointerSize>().Int32Value(),
                           /* no_rip= */ true);
}
6487
// HLoadException only needs a register for its output; no call is made.
void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}
6493
// Loads the pending exception from thread-local storage (GS-relative).
void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
  __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
}
6497
// HClearException has no inputs, no output, and makes no call.
void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
  new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
}
6501
// Clears the pending exception by storing null (0) into the thread-local slot.
void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
}
6505
// HThrow always calls into the runtime; the exception object is passed in the
// first runtime-call argument register.
void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
6512
// Emits the runtime call that delivers the exception; does not return.
void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
6517
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006518// Temp is used for read barrier.
6519static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
6520 if (kEmitCompilerReadBarrier &&
Vladimir Marko953437b2016-08-24 08:30:46 +00006521 !kUseBakerReadBarrier &&
6522 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006523 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006524 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
6525 return 1;
6526 }
6527 return 0;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006528}
6529
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006530// Interface case has 2 temps, one for holding the number of interfaces, one for the current
6531// interface pointer, the current interface is compared in memory.
6532// The other checks have one temp for loading the object's class.
6533static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
6534 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
6535 return 2;
6536 }
6537 return 1 + NumberOfInstanceOfTemps(type_check_kind);
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00006538}
6539
// Sets up register constraints for an HInstanceOf, choosing the call kind and
// temp count from the type-check kind and read-barrier configuration.
void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  bool baker_read_barrier_slow_path = false;
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck: {
      // These kinds only need a slow path when a read barrier is required.
      bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
      call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
      baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
      break;
    }
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      // These kinds always fall back to a slow path.
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
    case TypeCheckKind::kBitstringCheck:
      break;
  }

  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
  if (baker_read_barrier_slow_path) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  if (type_check_kind == TypeCheckKind::kBitstringCheck) {
    // The bitstring check compares against constants baked into the code.
    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
  } else {
    locations->SetInAt(1, Location::Any());
  }
  // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
  locations->SetOut(Location::RequiresRegister());
  locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
}
6580
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006581void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006582 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006583 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006584 Location obj_loc = locations->InAt(0);
6585 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006586 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006587 Location out_loc = locations->Out();
6588 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006589 const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
6590 DCHECK_LE(num_temps, 1u);
6591 Location maybe_temp_loc = (num_temps >= 1u) ? locations->GetTemp(0) : Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006592 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006593 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
6594 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
6595 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07006596 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006597 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006598
6599 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006600 // Avoid null check if we know obj is not null.
6601 if (instruction->MustDoNullCheck()) {
6602 __ testl(obj, obj);
6603 __ j(kEqual, &zero);
6604 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006605
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006606 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006607 case TypeCheckKind::kExactCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00006608 ReadBarrierOption read_barrier_option =
6609 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006610 // /* HeapReference<Class> */ out = obj->klass_
6611 GenerateReferenceLoadTwoRegisters(instruction,
6612 out_loc,
6613 obj_loc,
6614 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00006615 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006616 if (cls.IsRegister()) {
6617 __ cmpl(out, cls.AsRegister<CpuRegister>());
6618 } else {
6619 DCHECK(cls.IsStackSlot()) << cls;
6620 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
6621 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006622 if (zero.IsLinked()) {
6623 // Classes must be equal for the instanceof to succeed.
6624 __ j(kNotEqual, &zero);
6625 __ movl(out, Immediate(1));
6626 __ jmp(&done);
6627 } else {
6628 __ setcc(kEqual, out);
6629 // setcc only sets the low byte.
6630 __ andl(out, Immediate(1));
6631 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006632 break;
6633 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006634
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006635 case TypeCheckKind::kAbstractClassCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00006636 ReadBarrierOption read_barrier_option =
6637 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006638 // /* HeapReference<Class> */ out = obj->klass_
6639 GenerateReferenceLoadTwoRegisters(instruction,
6640 out_loc,
6641 obj_loc,
6642 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00006643 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006644 // If the class is abstract, we eagerly fetch the super class of the
6645 // object to avoid doing a comparison we know will fail.
6646 NearLabel loop, success;
6647 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006648 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006649 GenerateReferenceLoadOneRegister(instruction,
6650 out_loc,
6651 super_offset,
6652 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00006653 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006654 __ testl(out, out);
6655 // If `out` is null, we use it for the result, and jump to `done`.
6656 __ j(kEqual, &done);
6657 if (cls.IsRegister()) {
6658 __ cmpl(out, cls.AsRegister<CpuRegister>());
6659 } else {
6660 DCHECK(cls.IsStackSlot()) << cls;
6661 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
6662 }
6663 __ j(kNotEqual, &loop);
6664 __ movl(out, Immediate(1));
6665 if (zero.IsLinked()) {
6666 __ jmp(&done);
6667 }
6668 break;
6669 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006670
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006671 case TypeCheckKind::kClassHierarchyCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00006672 ReadBarrierOption read_barrier_option =
6673 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006674 // /* HeapReference<Class> */ out = obj->klass_
6675 GenerateReferenceLoadTwoRegisters(instruction,
6676 out_loc,
6677 obj_loc,
6678 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00006679 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006680 // Walk over the class hierarchy to find a match.
6681 NearLabel loop, success;
6682 __ Bind(&loop);
6683 if (cls.IsRegister()) {
6684 __ cmpl(out, cls.AsRegister<CpuRegister>());
6685 } else {
6686 DCHECK(cls.IsStackSlot()) << cls;
6687 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
6688 }
6689 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006690 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006691 GenerateReferenceLoadOneRegister(instruction,
6692 out_loc,
6693 super_offset,
6694 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00006695 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006696 __ testl(out, out);
6697 __ j(kNotEqual, &loop);
6698 // If `out` is null, we use it for the result, and jump to `done`.
6699 __ jmp(&done);
6700 __ Bind(&success);
6701 __ movl(out, Immediate(1));
6702 if (zero.IsLinked()) {
6703 __ jmp(&done);
6704 }
6705 break;
6706 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006707
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006708 case TypeCheckKind::kArrayObjectCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00006709 ReadBarrierOption read_barrier_option =
6710 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006711 // /* HeapReference<Class> */ out = obj->klass_
6712 GenerateReferenceLoadTwoRegisters(instruction,
6713 out_loc,
6714 obj_loc,
6715 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00006716 read_barrier_option);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006717 // Do an exact check.
6718 NearLabel exact_check;
6719 if (cls.IsRegister()) {
6720 __ cmpl(out, cls.AsRegister<CpuRegister>());
6721 } else {
6722 DCHECK(cls.IsStackSlot()) << cls;
6723 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
6724 }
6725 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006726 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006727 // /* HeapReference<Class> */ out = out->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006728 GenerateReferenceLoadOneRegister(instruction,
6729 out_loc,
6730 component_offset,
6731 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00006732 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006733 __ testl(out, out);
6734 // If `out` is null, we use it for the result, and jump to `done`.
6735 __ j(kEqual, &done);
6736 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
6737 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006738 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006739 __ movl(out, Immediate(1));
6740 __ jmp(&done);
6741 break;
6742 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006743
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006744 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006745 // No read barrier since the slow path will retry upon failure.
6746 // /* HeapReference<Class> */ out = obj->klass_
6747 GenerateReferenceLoadTwoRegisters(instruction,
6748 out_loc,
6749 obj_loc,
6750 class_offset,
6751 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006752 if (cls.IsRegister()) {
6753 __ cmpl(out, cls.AsRegister<CpuRegister>());
6754 } else {
6755 DCHECK(cls.IsStackSlot()) << cls;
6756 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
6757 }
6758 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01006759 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86_64(
Andreas Gampe3db70682018-12-26 15:12:03 -08006760 instruction, /* is_fatal= */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006761 codegen_->AddSlowPath(slow_path);
6762 __ j(kNotEqual, slow_path->GetEntryLabel());
6763 __ movl(out, Immediate(1));
6764 if (zero.IsLinked()) {
6765 __ jmp(&done);
6766 }
6767 break;
6768 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006769
Calin Juravle98893e12015-10-02 21:05:03 +01006770 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006771 case TypeCheckKind::kInterfaceCheck: {
6772 // Note that we indeed only call on slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006773 // into the slow path for the unresolved and interface check
Roland Levillain0d5a2812015-11-13 10:07:31 +00006774 // cases.
6775 //
6776 // We cannot directly call the InstanceofNonTrivial runtime
6777 // entry point without resorting to a type checking slow path
6778 // here (i.e. by calling InvokeRuntime directly), as it would
6779 // require to assign fixed registers for the inputs of this
6780 // HInstanceOf instruction (following the runtime calling
6781 // convention), which might be cluttered by the potential first
6782 // read barrier emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006783 //
6784 // TODO: Introduce a new runtime entry point taking the object
6785 // to test (instead of its class) as argument, and let it deal
6786 // with the read barrier issues. This will let us refactor this
6787 // case of the `switch` code as it was previously (with a direct
6788 // call to the runtime not using a type checking slow path).
6789 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006790 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01006791 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86_64(
Andreas Gampe3db70682018-12-26 15:12:03 -08006792 instruction, /* is_fatal= */ false);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006793 codegen_->AddSlowPath(slow_path);
6794 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006795 if (zero.IsLinked()) {
6796 __ jmp(&done);
6797 }
6798 break;
6799 }
Vladimir Marko175e7862018-03-27 09:03:13 +00006800
6801 case TypeCheckKind::kBitstringCheck: {
6802 // /* HeapReference<Class> */ temp = obj->klass_
6803 GenerateReferenceLoadTwoRegisters(instruction,
6804 out_loc,
6805 obj_loc,
6806 class_offset,
6807 kWithoutReadBarrier);
6808
6809 GenerateBitstringTypeCheckCompare(instruction, out);
6810 if (zero.IsLinked()) {
6811 __ j(kNotEqual, &zero);
6812 __ movl(out, Immediate(1));
6813 __ jmp(&done);
6814 } else {
6815 __ setcc(kEqual, out);
6816 // setcc only sets the low byte.
6817 __ andl(out, Immediate(1));
6818 }
6819 break;
6820 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006821 }
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006822
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006823 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006824 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006825 __ xorl(out, out);
6826 }
6827
6828 if (done.IsLinked()) {
6829 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006830 }
6831
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006832 if (slow_path != nullptr) {
6833 __ Bind(slow_path->GetExitLabel());
6834 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006835}
6836
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006837void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006838 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko87584542017-12-12 17:47:52 +00006839 LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
Vladimir Markoca6fff82017-10-03 14:49:14 +01006840 LocationSummary* locations =
6841 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006842 locations->SetInAt(0, Location::RequiresRegister());
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006843 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
6844 // Require a register for the interface check since there is a loop that compares the class to
6845 // a memory address.
6846 locations->SetInAt(1, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00006847 } else if (type_check_kind == TypeCheckKind::kBitstringCheck) {
6848 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
6849 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
6850 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006851 } else {
6852 locations->SetInAt(1, Location::Any());
6853 }
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006854 // Add temps for read barriers and other uses. One is used by TypeCheckSlowPathX86.
6855 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006856}
6857
6858void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006859 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006860 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006861 Location obj_loc = locations->InAt(0);
6862 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006863 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006864 Location temp_loc = locations->GetTemp(0);
6865 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006866 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
6867 DCHECK_GE(num_temps, 1u);
6868 DCHECK_LE(num_temps, 2u);
6869 Location maybe_temp2_loc = (num_temps >= 2u) ? locations->GetTemp(1) : Location::NoLocation();
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006870 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
6871 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
6872 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
6873 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
6874 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
6875 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006876 const uint32_t object_array_data_offset =
6877 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006878
Vladimir Marko87584542017-12-12 17:47:52 +00006879 bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006880 SlowPathCode* type_check_slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01006881 new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86_64(
6882 instruction, is_type_check_slow_path_fatal);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006883 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006884
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006885
6886 NearLabel done;
6887 // Avoid null check if we know obj is not null.
6888 if (instruction->MustDoNullCheck()) {
6889 __ testl(obj, obj);
6890 __ j(kEqual, &done);
6891 }
6892
Roland Levillain0d5a2812015-11-13 10:07:31 +00006893 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006894 case TypeCheckKind::kExactCheck:
6895 case TypeCheckKind::kArrayCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006896 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006897 GenerateReferenceLoadTwoRegisters(instruction,
6898 temp_loc,
6899 obj_loc,
6900 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006901 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006902 if (cls.IsRegister()) {
6903 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6904 } else {
6905 DCHECK(cls.IsStackSlot()) << cls;
6906 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6907 }
6908 // Jump to slow path for throwing the exception or doing a
6909 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006910 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006911 break;
6912 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006913
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006914 case TypeCheckKind::kAbstractClassCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006915 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006916 GenerateReferenceLoadTwoRegisters(instruction,
6917 temp_loc,
6918 obj_loc,
6919 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006920 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006921 // If the class is abstract, we eagerly fetch the super class of the
6922 // object to avoid doing a comparison we know will fail.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006923 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006924 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006925 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006926 GenerateReferenceLoadOneRegister(instruction,
6927 temp_loc,
6928 super_offset,
6929 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006930 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006931
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006932 // If the class reference currently in `temp` is null, jump to the slow path to throw the
6933 // exception.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006934 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006935 // Otherwise, compare the classes.
6936 __ j(kZero, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006937 if (cls.IsRegister()) {
6938 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6939 } else {
6940 DCHECK(cls.IsStackSlot()) << cls;
6941 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6942 }
6943 __ j(kNotEqual, &loop);
6944 break;
6945 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006946
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006947 case TypeCheckKind::kClassHierarchyCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006948 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006949 GenerateReferenceLoadTwoRegisters(instruction,
6950 temp_loc,
6951 obj_loc,
6952 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006953 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006954 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006955 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006956 __ Bind(&loop);
6957 if (cls.IsRegister()) {
6958 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6959 } else {
6960 DCHECK(cls.IsStackSlot()) << cls;
6961 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6962 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006963 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006964
Roland Levillain0d5a2812015-11-13 10:07:31 +00006965 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006966 GenerateReferenceLoadOneRegister(instruction,
6967 temp_loc,
6968 super_offset,
6969 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006970 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006971
6972 // If the class reference currently in `temp` is not null, jump
6973 // back at the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006974 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006975 __ j(kNotZero, &loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006976 // Otherwise, jump to the slow path to throw the exception.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006977 __ jmp(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006978 break;
6979 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006980
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006981 case TypeCheckKind::kArrayObjectCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006982 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006983 GenerateReferenceLoadTwoRegisters(instruction,
6984 temp_loc,
6985 obj_loc,
6986 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006987 kWithoutReadBarrier);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006988 // Do an exact check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006989 NearLabel check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006990 if (cls.IsRegister()) {
6991 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6992 } else {
6993 DCHECK(cls.IsStackSlot()) << cls;
6994 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6995 }
6996 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006997
6998 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006999 // /* HeapReference<Class> */ temp = temp->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007000 GenerateReferenceLoadOneRegister(instruction,
7001 temp_loc,
7002 component_offset,
7003 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007004 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007005
7006 // If the component type is not null (i.e. the object is indeed
7007 // an array), jump to label `check_non_primitive_component_type`
7008 // to further check that this component type is not a primitive
7009 // type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007010 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007011 // Otherwise, jump to the slow path to throw the exception.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007012 __ j(kZero, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007013 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007014 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007015 break;
7016 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007017
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007018 case TypeCheckKind::kUnresolvedCheck: {
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07007019 // We always go into the type check slow path for the unresolved case.
Roland Levillain0d5a2812015-11-13 10:07:31 +00007020 //
7021 // We cannot directly call the CheckCast runtime entry point
7022 // without resorting to a type checking slow path here (i.e. by
7023 // calling InvokeRuntime directly), as it would require to
7024 // assign fixed registers for the inputs of this HInstanceOf
7025 // instruction (following the runtime calling convention), which
7026 // might be cluttered by the potential first read barrier
7027 // emission at the beginning of this method.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007028 __ jmp(type_check_slow_path->GetEntryLabel());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007029 break;
7030 }
7031
Vladimir Marko175e7862018-03-27 09:03:13 +00007032 case TypeCheckKind::kInterfaceCheck: {
Vladimir Markoe619f6c2017-12-12 16:00:01 +00007033 // Fast path for the interface check. Try to avoid read barriers to improve the fast path.
7034 // We can not get false positives by doing this.
7035 // /* HeapReference<Class> */ temp = obj->klass_
7036 GenerateReferenceLoadTwoRegisters(instruction,
7037 temp_loc,
7038 obj_loc,
7039 class_offset,
7040 kWithoutReadBarrier);
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07007041
Vladimir Markoe619f6c2017-12-12 16:00:01 +00007042 // /* HeapReference<Class> */ temp = temp->iftable_
7043 GenerateReferenceLoadTwoRegisters(instruction,
7044 temp_loc,
7045 temp_loc,
7046 iftable_offset,
7047 kWithoutReadBarrier);
7048 // Iftable is never null.
7049 __ movl(maybe_temp2_loc.AsRegister<CpuRegister>(), Address(temp, array_length_offset));
7050 // Maybe poison the `cls` for direct comparison with memory.
7051 __ MaybePoisonHeapReference(cls.AsRegister<CpuRegister>());
7052 // Loop through the iftable and check if any class matches.
7053 NearLabel start_loop;
7054 __ Bind(&start_loop);
7055 // Need to subtract first to handle the empty array case.
7056 __ subl(maybe_temp2_loc.AsRegister<CpuRegister>(), Immediate(2));
7057 __ j(kNegative, type_check_slow_path->GetEntryLabel());
7058 // Go to next interface if the classes do not match.
7059 __ cmpl(cls.AsRegister<CpuRegister>(),
7060 CodeGeneratorX86_64::ArrayAddress(temp,
7061 maybe_temp2_loc,
7062 TIMES_4,
7063 object_array_data_offset));
7064 __ j(kNotEqual, &start_loop); // Return if same class.
7065 // If `cls` was poisoned above, unpoison it.
7066 __ MaybeUnpoisonHeapReference(cls.AsRegister<CpuRegister>());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007067 break;
Vladimir Marko175e7862018-03-27 09:03:13 +00007068 }
7069
7070 case TypeCheckKind::kBitstringCheck: {
7071 // /* HeapReference<Class> */ temp = obj->klass_
7072 GenerateReferenceLoadTwoRegisters(instruction,
7073 temp_loc,
7074 obj_loc,
7075 class_offset,
7076 kWithoutReadBarrier);
7077
7078 GenerateBitstringTypeCheckCompare(instruction, temp);
7079 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
7080 break;
7081 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007082 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007083
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007084 if (done.IsLinked()) {
7085 __ Bind(&done);
7086 }
7087
Roland Levillain0d5a2812015-11-13 10:07:31 +00007088 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00007089}
7090
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00007091void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007092 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
7093 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00007094 InvokeRuntimeCallingConvention calling_convention;
7095 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
7096}
7097
7098void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01007099 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexandre Rames8158f282015-08-07 10:26:17 +01007100 instruction,
Serban Constantinescuba45db02016-07-12 22:53:02 +01007101 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00007102 if (instruction->IsEnter()) {
7103 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
7104 } else {
7105 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
7106 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00007107}
7108
Shalini Salomi Bodapatidd121f62018-10-26 15:03:53 +05307109void LocationsBuilderX86_64::VisitX86AndNot(HX86AndNot* instruction) {
7110 DCHECK(codegen_->GetInstructionSetFeatures().HasAVX2());
7111 DCHECK(DataType::IsIntOrLongType(instruction->GetType())) << instruction->GetType();
7112 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
7113 locations->SetInAt(0, Location::RequiresRegister());
7114 // There is no immediate variant of negated bitwise and in X86.
7115 locations->SetInAt(1, Location::RequiresRegister());
7116 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
7117}
7118
7119void LocationsBuilderX86_64::VisitX86MaskOrResetLeastSetBit(HX86MaskOrResetLeastSetBit* instruction) {
7120 DCHECK(codegen_->GetInstructionSetFeatures().HasAVX2());
7121 DCHECK(DataType::IsIntOrLongType(instruction->GetType())) << instruction->GetType();
7122 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
7123 locations->SetInAt(0, Location::RequiresRegister());
7124 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
7125}
7126
7127void InstructionCodeGeneratorX86_64::VisitX86AndNot(HX86AndNot* instruction) {
7128 LocationSummary* locations = instruction->GetLocations();
7129 Location first = locations->InAt(0);
7130 Location second = locations->InAt(1);
7131 Location dest = locations->Out();
7132 __ andn(dest.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
7133}
7134
7135void InstructionCodeGeneratorX86_64::VisitX86MaskOrResetLeastSetBit(HX86MaskOrResetLeastSetBit* instruction) {
7136 LocationSummary* locations = instruction->GetLocations();
7137 Location src = locations->InAt(0);
7138 Location dest = locations->Out();
7139 switch (instruction->GetOpKind()) {
7140 case HInstruction::kAnd:
7141 __ blsr(dest.AsRegister<CpuRegister>(), src.AsRegister<CpuRegister>());
7142 break;
7143 case HInstruction::kXor:
7144 __ blsmsk(dest.AsRegister<CpuRegister>(), src.AsRegister<CpuRegister>());
7145 break;
7146 default:
7147 LOG(FATAL) << "Unreachable";
7148 }
7149}
7150
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007151void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
7152void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
7153void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
7154
7155void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
7156 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007157 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007158 DCHECK(instruction->GetResultType() == DataType::Type::kInt32
7159 || instruction->GetResultType() == DataType::Type::kInt64);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007160 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04007161 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007162 locations->SetOut(Location::SameAsFirstInput());
7163}
7164
void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
  // Code generation for And/Or/Xor is shared; dispatch on the node kind
  // happens inside HandleBitwiseOperation.
  HandleBitwiseOperation(instruction);
}
7168
void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
  // Code generation for And/Or/Xor is shared; dispatch on the node kind
  // happens inside HandleBitwiseOperation.
  HandleBitwiseOperation(instruction);
}
7172
void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
  // Code generation for And/Or/Xor is shared; dispatch on the node kind
  // happens inside HandleBitwiseOperation.
  HandleBitwiseOperation(instruction);
}
7176
// Emits machine code for a two-operand bitwise And/Or/Xor on int32 or int64
// values. The locations builder constrained the output to the same register
// as the first input, so every form is emitted as an x86-64 two-address
// instruction: first <op>= second.
void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));

  if (instruction->GetResultType() == DataType::Type::kInt32) {
    // 32-bit case: the second operand may be a register, an int constant
    // (emitted as an immediate), or a stack slot (memory operand).
    if (second.IsRegister()) {
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      }
    } else if (second.IsConstant()) {
      Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), imm);
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), imm);
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), imm);
      }
    } else {
      // Stack-slot operand.
      Address address(CpuRegister(RSP), second.GetStackIndex());
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), address);
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), address);
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), address);
      }
    }
  } else {
    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
    // 64-bit case: a constant that fits in 32 bits is encoded as a
    // (sign-extended) immediate; otherwise it is read from a RIP-relative
    // 64-bit literal via LiteralInt64Address.
    CpuRegister first_reg = first.AsRegister<CpuRegister>();
    bool second_is_constant = false;
    int64_t value = 0;
    if (second.IsConstant()) {
      second_is_constant = true;
      value = second.GetConstant()->AsLongConstant()->GetValue();
    }
    // Only meaningful when `second_is_constant` is true (`value` stays 0
    // otherwise).
    bool is_int32_value = IsInt<32>(value);

    if (instruction->IsAnd()) {
      if (second_is_constant) {
        if (is_int32_value) {
          __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ andq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ andq(first_reg, second.AsRegister<CpuRegister>());
      }
    } else if (instruction->IsOr()) {
      if (second_is_constant) {
        if (is_int32_value) {
          __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ orq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ orq(first_reg, second.AsRegister<CpuRegister>());
      }
    } else {
      DCHECK(instruction->IsXor());
      if (second_is_constant) {
        if (is_int32_value) {
          __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ xorq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ xorq(first_reg, second.AsRegister<CpuRegister>());
      }
    }
  }
}
7265
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007266void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(
7267 HInstruction* instruction,
7268 Location out,
7269 uint32_t offset,
7270 Location maybe_temp,
7271 ReadBarrierOption read_barrier_option) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007272 CpuRegister out_reg = out.AsRegister<CpuRegister>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007273 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007274 CHECK(kEmitCompilerReadBarrier);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007275 if (kUseBakerReadBarrier) {
7276 // Load with fast path based Baker's read barrier.
7277 // /* HeapReference<Object> */ out = *(out + offset)
7278 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08007279 instruction, out, out_reg, offset, /* needs_null_check= */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007280 } else {
7281 // Load with slow path based read barrier.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00007282 // Save the value of `out` into `maybe_temp` before overwriting it
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007283 // in the following move operation, as we will need it for the
7284 // read barrier below.
Vladimir Marko953437b2016-08-24 08:30:46 +00007285 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain95e7ffc2016-01-22 11:57:25 +00007286 __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007287 // /* HeapReference<Object> */ out = *(out + offset)
7288 __ movl(out_reg, Address(out_reg, offset));
Roland Levillain95e7ffc2016-01-22 11:57:25 +00007289 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007290 }
7291 } else {
7292 // Plain load with no read barrier.
7293 // /* HeapReference<Object> */ out = *(out + offset)
7294 __ movl(out_reg, Address(out_reg, offset));
7295 __ MaybeUnpoisonHeapReference(out_reg);
7296 }
7297}
7298
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007299void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(
7300 HInstruction* instruction,
7301 Location out,
7302 Location obj,
7303 uint32_t offset,
7304 ReadBarrierOption read_barrier_option) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007305 CpuRegister out_reg = out.AsRegister<CpuRegister>();
7306 CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007307 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007308 CHECK(kEmitCompilerReadBarrier);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007309 if (kUseBakerReadBarrier) {
7310 // Load with fast path based Baker's read barrier.
7311 // /* HeapReference<Object> */ out = *(obj + offset)
7312 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08007313 instruction, out, obj_reg, offset, /* needs_null_check= */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007314 } else {
7315 // Load with slow path based read barrier.
7316 // /* HeapReference<Object> */ out = *(obj + offset)
7317 __ movl(out_reg, Address(obj_reg, offset));
7318 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
7319 }
7320 } else {
7321 // Plain load with no read barrier.
7322 // /* HeapReference<Object> */ out = *(obj + offset)
7323 __ movl(out_reg, Address(obj_reg, offset));
7324 __ MaybeUnpoisonHeapReference(out_reg);
7325 }
7326}
7327
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007328void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(
7329 HInstruction* instruction,
7330 Location root,
7331 const Address& address,
7332 Label* fixup_label,
7333 ReadBarrierOption read_barrier_option) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007334 CpuRegister root_reg = root.AsRegister<CpuRegister>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007335 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartier31b12e32016-09-02 17:11:57 -07007336 DCHECK(kEmitCompilerReadBarrier);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007337 if (kUseBakerReadBarrier) {
7338 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
7339 // Baker's read barrier are used:
7340 //
Roland Levillaind966ce72017-02-09 16:20:14 +00007341 // root = obj.field;
7342 // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
7343 // if (temp != null) {
7344 // root = temp(root)
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007345 // }
7346
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007347 // /* GcRoot<mirror::Object> */ root = *address
7348 __ movl(root_reg, address);
7349 if (fixup_label != nullptr) {
7350 __ Bind(fixup_label);
7351 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007352 static_assert(
7353 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
7354 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
7355 "have different sizes.");
7356 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
7357 "art::mirror::CompressedReference<mirror::Object> and int32_t "
7358 "have different sizes.");
7359
Vladimir Marko953437b2016-08-24 08:30:46 +00007360 // Slow path marking the GC root `root`.
Vladimir Marko174b2e22017-10-12 13:34:49 +01007361 SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) ReadBarrierMarkSlowPathX86_64(
Andreas Gampe3db70682018-12-26 15:12:03 -08007362 instruction, root, /* unpoison_ref_before_marking= */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007363 codegen_->AddSlowPath(slow_path);
7364
Roland Levillaind966ce72017-02-09 16:20:14 +00007365 // Test the `Thread::Current()->pReadBarrierMarkReg ## root.reg()` entrypoint.
7366 const int32_t entry_point_offset =
Roland Levillain97c46462017-05-11 14:04:03 +01007367 Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(root.reg());
Andreas Gampe3db70682018-12-26 15:12:03 -08007368 __ gs()->cmpl(Address::Absolute(entry_point_offset, /* no_rip= */ true), Immediate(0));
Roland Levillaind966ce72017-02-09 16:20:14 +00007369 // The entrypoint is null when the GC is not marking.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007370 __ j(kNotEqual, slow_path->GetEntryLabel());
7371 __ Bind(slow_path->GetExitLabel());
7372 } else {
7373 // GC root loaded through a slow path for read barriers other
7374 // than Baker's.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007375 // /* GcRoot<mirror::Object>* */ root = address
7376 __ leaq(root_reg, address);
7377 if (fixup_label != nullptr) {
7378 __ Bind(fixup_label);
7379 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007380 // /* mirror::Object* */ root = root->Read()
7381 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
7382 }
7383 } else {
7384 // Plain GC root load with no read barrier.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007385 // /* GcRoot<mirror::Object> */ root = *address
7386 __ movl(root_reg, address);
7387 if (fixup_label != nullptr) {
7388 __ Bind(fixup_label);
7389 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00007390 // Note that GC roots are not affected by heap poisoning, thus we
7391 // do not have to unpoison `root_reg` here.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007392 }
7393}
7394
7395void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
7396 Location ref,
7397 CpuRegister obj,
7398 uint32_t offset,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007399 bool needs_null_check) {
7400 DCHECK(kEmitCompilerReadBarrier);
7401 DCHECK(kUseBakerReadBarrier);
7402
7403 // /* HeapReference<Object> */ ref = *(obj + offset)
7404 Address src(obj, offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00007405 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007406}
7407
7408void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
7409 Location ref,
7410 CpuRegister obj,
7411 uint32_t data_offset,
7412 Location index,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007413 bool needs_null_check) {
7414 DCHECK(kEmitCompilerReadBarrier);
7415 DCHECK(kUseBakerReadBarrier);
7416
Roland Levillain3d312422016-06-23 13:53:42 +01007417 static_assert(
7418 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
7419 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007420 // /* HeapReference<Object> */ ref =
7421 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007422 Address src = CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00007423 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007424}
7425
// Core Baker read-barrier reference load. Emits: gray-bit test on the lock
// word, the reference load itself, then a conditional branch to a marking
// slow path. NOTE: the code between the testb and the final `j` must not
// modify EFLAGS — the branch consumes the flags set by the testb.
void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                    Location ref,
                                                                    CpuRegister obj,
                                                                    const Address& src,
                                                                    bool needs_null_check,
                                                                    bool always_update_field,
                                                                    CpuRegister* temp1,
                                                                    CpuRegister* temp2) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::GrayState());
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as:
  // - it implements the load-load fence using a data dependency on
  //   the high-bits of rb_state, which are expected to be all zeroes
  //   (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
  //   here, which is a no-op thanks to the x86-64 memory model);
  // - it performs additional checks that we do not do here for
  //   performance reasons.

  CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::NonGrayState() == 0, "Expecting non-gray to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
  constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
  constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);

  // if (rb_state == ReadBarrier::GrayState())
  //   ref = ReadBarrier::Mark(ref);
  // At this point, just do the "if" and make sure that flags are preserved until the branch.
  // A single-byte testb reads only the byte of monitor_ that holds the gray bit.
  __ testb(Address(obj, monitor_offset + gray_byte_position), Immediate(test_value));
  if (needs_null_check) {
    // The testb above is the faulting instruction for an implicit null check.
    MaybeRecordImplicitNullCheck(instruction);
  }

  // Load fence to prevent load-load reordering.
  // Note that this is a no-op, thanks to the x86-64 memory model.
  GenerateMemoryBarrier(MemBarrierKind::kLoadAny);

  // The actual reference load.
  // /* HeapReference<Object> */ ref = *src
  __ movl(ref_reg, src);  // Flags are unaffected.

  // Note: Reference unpoisoning modifies the flags, so we need to delay it after the branch.
  // Slow path marking the object `ref` when it is gray.
  SlowPathCode* slow_path;
  if (always_update_field) {
    // Also writes the marked reference back to the field; needs two temps.
    DCHECK(temp1 != nullptr);
    DCHECK(temp2 != nullptr);
    slow_path = new (GetScopedAllocator()) ReadBarrierMarkAndUpdateFieldSlowPathX86_64(
        instruction, ref, obj, src, /* unpoison_ref_before_marking= */ true, *temp1, *temp2);
  } else {
    slow_path = new (GetScopedAllocator()) ReadBarrierMarkSlowPathX86_64(
        instruction, ref, /* unpoison_ref_before_marking= */ true);
  }
  AddSlowPath(slow_path);

  // We have done the "if" of the gray bit check above, now branch based on the flags.
  __ j(kNotZero, slow_path->GetEntryLabel());

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  __ Bind(slow_path->GetExitLabel());
}
7509
7510void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
7511 Location out,
7512 Location ref,
7513 Location obj,
7514 uint32_t offset,
7515 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007516 DCHECK(kEmitCompilerReadBarrier);
7517
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007518 // Insert a slow path based read barrier *after* the reference load.
7519 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00007520 // If heap poisoning is enabled, the unpoisoning of the loaded
7521 // reference will be carried out by the runtime within the slow
7522 // path.
7523 //
7524 // Note that `ref` currently does not get unpoisoned (when heap
7525 // poisoning is enabled), which is alright as the `ref` argument is
7526 // not used by the artReadBarrierSlow entry point.
7527 //
7528 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
Vladimir Marko174b2e22017-10-12 13:34:49 +01007529 SlowPathCode* slow_path = new (GetScopedAllocator())
Roland Levillain0d5a2812015-11-13 10:07:31 +00007530 ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
7531 AddSlowPath(slow_path);
7532
Roland Levillain0d5a2812015-11-13 10:07:31 +00007533 __ jmp(slow_path->GetEntryLabel());
7534 __ Bind(slow_path->GetExitLabel());
7535}
7536
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007537void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
7538 Location out,
7539 Location ref,
7540 Location obj,
7541 uint32_t offset,
7542 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007543 if (kEmitCompilerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007544 // Baker's read barriers shall be handled by the fast path
7545 // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
7546 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007547 // If heap poisoning is enabled, unpoisoning will be taken care of
7548 // by the runtime within the slow path.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007549 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007550 } else if (kPoisonHeapReferences) {
7551 __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
7552 }
7553}
7554
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007555void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
7556 Location out,
7557 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007558 DCHECK(kEmitCompilerReadBarrier);
7559
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007560 // Insert a slow path based read barrier *after* the GC root load.
7561 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00007562 // Note that GC roots are not affected by heap poisoning, so we do
7563 // not need to do anything special for this here.
7564 SlowPathCode* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01007565 new (GetScopedAllocator()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007566 AddSlowPath(slow_path);
7567
Roland Levillain0d5a2812015-11-13 10:07:31 +00007568 __ jmp(slow_path->GetEntryLabel());
7569 __ Bind(slow_path->GetExitLabel());
7570}
7571
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01007572void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00007573 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00007574 LOG(FATAL) << "Unreachable";
7575}
7576
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01007577void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00007578 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00007579 LOG(FATAL) << "Unreachable";
7580}
7581
Mark Mendellfe57faa2015-09-18 09:26:15 -04007582// Simple implementation of packed switch - generate cascaded compare/jumps.
7583void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
7584 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007585 new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
Mark Mendellfe57faa2015-09-18 09:26:15 -04007586 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell9c86b482015-09-18 13:36:07 -04007587 locations->AddTemp(Location::RequiresRegister());
7588 locations->AddTemp(Location::RequiresRegister());
Mark Mendellfe57faa2015-09-18 09:26:15 -04007589}
7590
// Generates a packed switch either as a cascade of compare/jump pairs (for
// small switches) or as a RIP-relative jump table in the constant area.
void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Should we generate smaller inline compare/jumps?
  if (num_entries <= kPackedSwitchJumpTableThreshold) {
    // Figure out the correct compare values and jump conditions.
    // Handle the first compare/branch as a special case because it might
    // jump to the default case.
    DCHECK_GT(num_entries, 2u);
    Condition first_condition;
    uint32_t index;
    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    if (lower_bound != 0) {
      // Signed compare: anything below lower_bound goes to the default case;
      // equality hits the first case.
      first_condition = kLess;
      __ cmpl(value_reg_in, Immediate(lower_bound));
      __ j(first_condition, codegen_->GetLabelOf(default_block));
      __ j(kEqual, codegen_->GetLabelOf(successors[0]));

      index = 1;
    } else {
      // Handle all the compare/jumps below. With a zero lower bound an
      // unsigned "below" also catches negative inputs.
      first_condition = kBelow;
      index = 0;
    }

    // Handle the rest of the compare/jumps, two cases per compare: one
    // strictly-less branch and one equality branch.
    for (; index + 1 < num_entries; index += 2) {
      int32_t compare_to_value = lower_bound + index + 1;
      __ cmpl(value_reg_in, Immediate(compare_to_value));
      // Jump to successors[index] if value < case_value[index].
      __ j(first_condition, codegen_->GetLabelOf(successors[index]));
      // Jump to successors[index + 1] if value == case_value[index + 1].
      __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
    }

    if (index != num_entries) {
      // There are an odd number of entries. Handle the last one.
      DCHECK_EQ(index + 1, num_entries);
      __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
      __ j(kEqual, codegen_->GetLabelOf(successors[index]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ jmp(codegen_->GetLabelOf(default_block));
    }
    return;
  }

  // Jump-table path below.
  // Remove the bias, if needed.
  Register value_reg_out = value_reg_in.AsRegister();
  if (lower_bound != 0) {
    __ leal(temp_reg, Address(value_reg_in, -lower_bound));
    value_reg_out = temp_reg.AsRegister();
  }
  CpuRegister value_reg(value_reg_out);

  // Is the value in range? (Unsigned compare also rejects negative inputs.)
  __ cmpl(value_reg, Immediate(num_entries - 1));
  __ j(kAbove, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the address of the jump table in the constant area.
  __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));

  // Load the (signed) offset from the jump table.
  __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));

  // Add the offset to the address of the table base.
  __ addq(temp_reg, base_reg);

  // And jump.
  __ jmp(temp_reg);
}
7671
xueliang.zhonge0eb4832017-10-30 13:43:14 +00007672void LocationsBuilderX86_64::VisitIntermediateAddress(HIntermediateAddress* instruction
7673 ATTRIBUTE_UNUSED) {
7674 LOG(FATAL) << "Unreachable";
7675}
7676
// NOTE(review): HIntermediateAddress is presumably never created for
// x86-64 (addressing modes make it unnecessary) — hence unreachable; confirm.
void InstructionCodeGeneratorX86_64::VisitIntermediateAddress(HIntermediateAddress* instruction
                                                              ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}
7681
Aart Bikc5d47542016-01-27 17:00:35 -08007682void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
7683 if (value == 0) {
7684 __ xorl(dest, dest);
7685 } else {
7686 __ movl(dest, Immediate(value));
7687 }
7688}
7689
Mark Mendell92e83bf2015-05-07 11:25:03 -04007690void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
7691 if (value == 0) {
Aart Bikc5d47542016-01-27 17:00:35 -08007692 // Clears upper bits too.
Mark Mendell92e83bf2015-05-07 11:25:03 -04007693 __ xorl(dest, dest);
Vladimir Markoed009782016-02-22 16:54:39 +00007694 } else if (IsUint<32>(value)) {
7695 // We can use a 32 bit move, as it will zero-extend and is shorter.
Mark Mendell92e83bf2015-05-07 11:25:03 -04007696 __ movl(dest, Immediate(static_cast<int32_t>(value)));
7697 } else {
7698 __ movq(dest, Immediate(value));
7699 }
7700}
7701
Mark Mendell7c0b44f2016-02-01 10:08:35 -05007702void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
7703 if (value == 0) {
7704 __ xorps(dest, dest);
7705 } else {
7706 __ movss(dest, LiteralInt32Address(value));
7707 }
7708}
7709
7710void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
7711 if (value == 0) {
7712 __ xorpd(dest, dest);
7713 } else {
7714 __ movsd(dest, LiteralInt64Address(value));
7715 }
7716}
7717
7718void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
7719 Load32BitValue(dest, bit_cast<int32_t, float>(value));
7720}
7721
7722void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
7723 Load64BitValue(dest, bit_cast<int64_t, double>(value));
7724}
7725
Aart Bika19616e2016-02-01 18:57:58 -08007726void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
7727 if (value == 0) {
7728 __ testl(dest, dest);
7729 } else {
7730 __ cmpl(dest, Immediate(value));
7731 }
7732}
7733
7734void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
7735 if (IsInt<32>(value)) {
7736 if (value == 0) {
7737 __ testq(dest, dest);
7738 } else {
7739 __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
7740 }
7741 } else {
7742 // Value won't fit in an int.
7743 __ cmpq(dest, LiteralInt64Address(value));
7744 }
7745}
7746
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007747void CodeGeneratorX86_64::GenerateIntCompare(Location lhs, Location rhs) {
7748 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
jessicahandojo4877b792016-09-08 19:49:13 -07007749 GenerateIntCompare(lhs_reg, rhs);
7750}
7751
7752void CodeGeneratorX86_64::GenerateIntCompare(CpuRegister lhs, Location rhs) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007753 if (rhs.IsConstant()) {
7754 int32_t value = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
jessicahandojo4877b792016-09-08 19:49:13 -07007755 Compare32BitValue(lhs, value);
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007756 } else if (rhs.IsStackSlot()) {
jessicahandojo4877b792016-09-08 19:49:13 -07007757 __ cmpl(lhs, Address(CpuRegister(RSP), rhs.GetStackIndex()));
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007758 } else {
jessicahandojo4877b792016-09-08 19:49:13 -07007759 __ cmpl(lhs, rhs.AsRegister<CpuRegister>());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007760 }
7761}
7762
7763void CodeGeneratorX86_64::GenerateLongCompare(Location lhs, Location rhs) {
7764 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
7765 if (rhs.IsConstant()) {
7766 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
7767 Compare64BitValue(lhs_reg, value);
7768 } else if (rhs.IsDoubleStackSlot()) {
7769 __ cmpq(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
7770 } else {
7771 __ cmpq(lhs_reg, rhs.AsRegister<CpuRegister>());
7772 }
7773}
7774
7775Address CodeGeneratorX86_64::ArrayAddress(CpuRegister obj,
7776 Location index,
7777 ScaleFactor scale,
7778 uint32_t data_offset) {
7779 return index.IsConstant() ?
7780 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << scale) + data_offset) :
7781 Address(obj, index.AsRegister<CpuRegister>(), scale, data_offset);
7782}
7783
Mark Mendellcfa410b2015-05-25 16:02:44 -04007784void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
7785 DCHECK(dest.IsDoubleStackSlot());
7786 if (IsInt<32>(value)) {
7787 // Can move directly as an int32 constant.
7788 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
7789 Immediate(static_cast<int32_t>(value)));
7790 } else {
7791 Load64BitValue(CpuRegister(TMP), value);
7792 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
7793 }
7794}
7795
Mark Mendell9c86b482015-09-18 13:36:07 -04007796/**
7797 * Class to handle late fixup of offsets into constant area.
7798 */
7799class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
7800 public:
7801 RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
7802 : codegen_(&codegen), offset_into_constant_area_(offset) {}
7803
7804 protected:
7805 void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }
7806
7807 CodeGeneratorX86_64* codegen_;
7808
7809 private:
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01007810 void Process(const MemoryRegion& region, int pos) override {
Mark Mendell9c86b482015-09-18 13:36:07 -04007811 // Patch the correct offset for the instruction. We use the address of the
7812 // 'next' instruction, which is 'pos' (patch the 4 bytes before).
7813 int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
7814 int32_t relative_position = constant_offset - pos;
7815
7816 // Patch in the right value.
7817 region.StoreUnaligned<int32_t>(pos - 4, relative_position);
7818 }
7819
7820 // Location in constant area that the fixup refers to.
7821 size_t offset_into_constant_area_;
7822};
7823
7824/**
7825 t * Class to handle late fixup of offsets to a jump table that will be created in the
7826 * constant area.
7827 */
7828class JumpTableRIPFixup : public RIPFixup {
7829 public:
7830 JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
7831 : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}
7832
7833 void CreateJumpTable() {
7834 X86_64Assembler* assembler = codegen_->GetAssembler();
7835
7836 // Ensure that the reference to the jump table has the correct offset.
7837 const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
7838 SetOffset(offset_in_constant_table);
7839
7840 // Compute the offset from the start of the function to this jump table.
7841 const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;
7842
7843 // Populate the jump table with the correct values for the jump table.
7844 int32_t num_entries = switch_instr_->GetNumEntries();
7845 HBasicBlock* block = switch_instr_->GetBlock();
7846 const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
7847 // The value that we want is the target offset - the position of the table.
7848 for (int32_t i = 0; i < num_entries; i++) {
7849 HBasicBlock* b = successors[i];
7850 Label* l = codegen_->GetLabelOf(b);
7851 DCHECK(l->IsBound());
7852 int32_t offset_to_block = l->Position() - current_table_offset;
7853 assembler->AppendInt32(offset_to_block);
7854 }
7855 }
7856
7857 private:
7858 const HPackedSwitch* switch_instr_;
7859};
7860
Mark Mendellf55c3e02015-03-26 21:07:46 -04007861void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
7862 // Generate the constant area if needed.
Mark Mendell39dcf552015-04-09 20:42:42 -04007863 X86_64Assembler* assembler = GetAssembler();
Mark Mendell9c86b482015-09-18 13:36:07 -04007864 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
7865 // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8 byte values.
Mark Mendell39dcf552015-04-09 20:42:42 -04007866 assembler->Align(4, 0);
7867 constant_area_start_ = assembler->CodeSize();
Mark Mendell9c86b482015-09-18 13:36:07 -04007868
7869 // Populate any jump tables.
Vladimir Marko7d157fc2017-05-10 16:29:23 +01007870 for (JumpTableRIPFixup* jump_table : fixups_to_jump_tables_) {
Mark Mendell9c86b482015-09-18 13:36:07 -04007871 jump_table->CreateJumpTable();
7872 }
7873
7874 // And now add the constant area to the generated code.
Mark Mendell39dcf552015-04-09 20:42:42 -04007875 assembler->AddConstantArea();
Mark Mendellf55c3e02015-03-26 21:07:46 -04007876 }
7877
7878 // And finish up.
7879 CodeGenerator::Finalize(allocator);
7880}
7881
Mark Mendellf55c3e02015-03-26 21:07:46 -04007882Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007883 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddDouble(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007884 return Address::RIP(fixup);
7885}
7886
7887Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007888 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddFloat(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007889 return Address::RIP(fixup);
7890}
7891
7892Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007893 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddInt32(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007894 return Address::RIP(fixup);
7895}
7896
7897Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007898 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddInt64(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007899 return Address::RIP(fixup);
7900}
7901
Andreas Gampe85b62f22015-09-09 13:15:38 -07007902// TODO: trg as memory.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007903void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, DataType::Type type) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07007904 if (!trg.IsValid()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007905 DCHECK_EQ(type, DataType::Type::kVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07007906 return;
7907 }
7908
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007909 DCHECK_NE(type, DataType::Type::kVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07007910
7911 Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
7912 if (trg.Equals(return_loc)) {
7913 return;
7914 }
7915
7916 // Let the parallel move resolver take care of all of this.
Vladimir Markoca6fff82017-10-03 14:49:14 +01007917 HParallelMove parallel_move(GetGraph()->GetAllocator());
Andreas Gampe85b62f22015-09-09 13:15:38 -07007918 parallel_move.AddMove(return_loc, trg, type, nullptr);
7919 GetMoveResolver()->EmitNativeCode(&parallel_move);
7920}
7921
Mark Mendell9c86b482015-09-18 13:36:07 -04007922Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
7923 // Create a fixup to be used to create and address the jump table.
7924 JumpTableRIPFixup* table_fixup =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007925 new (GetGraph()->GetAllocator()) JumpTableRIPFixup(*this, switch_instr);
Mark Mendell9c86b482015-09-18 13:36:07 -04007926
7927 // We have to populate the jump tables.
7928 fixups_to_jump_tables_.push_back(table_fixup);
7929 return Address::RIP(table_fixup);
7930}
7931
// Stores the 64-bit immediate `v` to memory: a single `movq` when `v` fits in
// a sign-extended 32-bit immediate, otherwise two 32-bit `movl` stores to the
// low and high halves (`addr_low`/`addr_high`). If `instruction` carries an
// implicit null check, it is recorded against the first store — the one that
// faults if the base pointer is null.
void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
                                             const Address& addr_high,
                                             int64_t v,
                                             HInstruction* instruction) {
  if (IsInt<32>(v)) {
    int32_t v_32 = v;
    // movq sign-extends the 32-bit immediate to 64 bits.
    __ movq(addr_low, Immediate(v_32));
    MaybeRecordImplicitNullCheck(instruction);
  } else {
    // Doesn't fit in a sign-extended 32-bit immediate. Do it in pieces.
    int32_t low_v = Low32Bits(v);
    int32_t high_v = High32Bits(v);
    __ movl(addr_low, Immediate(low_v));
    // Record the null check on the first store only: a null base faults here,
    // before the second store executes.
    MaybeRecordImplicitNullCheck(instruction);
    __ movl(addr_high, Immediate(high_v));
  }
}
7949
// Patches one JIT GC-root use site: writes the 32-bit address of the root's
// slot (entry `index_in_table` within `roots_data`) into `code` at the
// position recorded by `info.label`.
void CodeGeneratorX86_64::PatchJitRootUse(uint8_t* code,
                                          const uint8_t* roots_data,
                                          const PatchInfo<Label>& info,
                                          uint64_t index_in_table) const {
  // The label's bound position is offset from the literal itself; back up by
  // the adjustment to reach the start of the 32-bit literal to patch.
  uint32_t code_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
  // Address of the root's GcRoot<> slot within the roots table.
  uintptr_t address =
      reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
  using unaligned_uint32_t __attribute__((__aligned__(1))) = uint32_t;
  // The patch site inside the code stream is not necessarily 4-byte aligned,
  // hence the 1-byte-aligned type. The dchecked cast asserts the address fits
  // in 32 bits.
  reinterpret_cast<unaligned_uint32_t*>(code + code_offset)[0] =
      dchecked_integral_cast<uint32_t>(address);
}
7961
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007962void CodeGeneratorX86_64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
7963 for (const PatchInfo<Label>& info : jit_string_patches_) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00007964 StringReference string_reference(info.target_dex_file, dex::StringIndex(info.offset_or_index));
Vladimir Marko174b2e22017-10-12 13:34:49 +01007965 uint64_t index_in_table = GetJitStringRootIndex(string_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01007966 PatchJitRootUse(code, roots_data, info, index_in_table);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007967 }
7968
7969 for (const PatchInfo<Label>& info : jit_class_patches_) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00007970 TypeReference type_reference(info.target_dex_file, dex::TypeIndex(info.offset_or_index));
Vladimir Marko174b2e22017-10-12 13:34:49 +01007971 uint64_t index_in_table = GetJitClassRootIndex(type_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01007972 PatchJitRootUse(code, roots_data, info, index_in_table);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007973 }
7974}
7975
Shalini Salomi Bodapatib45a4352019-07-10 16:09:41 +05307976bool LocationsBuilderX86_64::CpuHasAvxFeatureFlag() {
7977 return codegen_->GetInstructionSetFeatures().HasAVX();
7978}
7979
7980bool LocationsBuilderX86_64::CpuHasAvx2FeatureFlag() {
7981 return codegen_->GetInstructionSetFeatures().HasAVX2();
7982}
7983
7984bool InstructionCodeGeneratorX86_64::CpuHasAvxFeatureFlag() {
7985 return codegen_->GetInstructionSetFeatures().HasAVX();
7986}
7987
7988bool InstructionCodeGeneratorX86_64::CpuHasAvx2FeatureFlag() {
7989 return codegen_->GetInstructionSetFeatures().HasAVX2();
7990}
7991
Roland Levillain4d027112015-07-01 15:41:14 +01007992#undef __
7993
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01007994} // namespace x86_64
7995} // namespace art