blob: bf324d6f75af323dae1a2cb1bd74ad1b5435ce7b [file] [log] [blame]
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86_64.h"
18
Vladimir Marko86c87522020-05-11 16:55:55 +010019#include "arch/x86_64/jni_frame_x86_64.h"
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +000020#include "art_method-inl.h"
Vladimir Marko94ec2db2017-09-06 17:21:03 +010021#include "class_table.h"
Guillaume Sanchez0f88e872015-03-30 17:55:45 +010022#include "code_generator_utils.h"
Vladimir Marko58155012015-08-19 12:49:41 +000023#include "compiled_method.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010024#include "entrypoints/quick/quick_entrypoints.h"
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +010025#include "gc/accounting/card_table.h"
Vladimir Markoeebb8212018-06-05 14:57:24 +010026#include "gc/space/image_space.h"
Andreas Gampe09659c22017-09-18 18:23:32 -070027#include "heap_poisoning.h"
Nicolas Geoffray4313ccb2020-08-26 17:01:15 +010028#include "interpreter/mterp/nterp.h"
Andreas Gampe71fb52f2014-12-29 17:43:08 -080029#include "intrinsics.h"
30#include "intrinsics_x86_64.h"
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +000031#include "jit/profiling_info.h"
Vladimir Markod8dbc8d2017-09-20 13:37:47 +010032#include "linker/linker_patch.h"
Andreas Gamped4901292017-05-30 18:41:34 -070033#include "lock_word.h"
Ian Rogers7e70b002014-10-08 11:47:24 -070034#include "mirror/array-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070035#include "mirror/class-inl.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010036#include "mirror/object_reference.h"
Ulya Trafimovich4f388e32021-06-28 12:39:14 +010037#include "mirror/var_handle.h"
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +000038#include "scoped_thread_state_change-inl.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010039#include "thread.h"
40#include "utils/assembler.h"
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010041#include "utils/stack_checks.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010042#include "utils/x86_64/assembler_x86_64.h"
Alex Light3a73ffb2021-01-25 14:11:05 +000043#include "utils/x86_64/constants_x86_64.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010044#include "utils/x86_64/managed_register_x86_64.h"
45
Vladimir Marko0a516052019-10-14 13:00:44 +000046namespace art {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010047
Roland Levillain0d5a2812015-11-13 10:07:31 +000048template<class MirrorType>
49class GcRoot;
50
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010051namespace x86_64 {
52
// Stack offset of the current ArtMethod* within the managed frame (spilled at the bottom).
static constexpr int kCurrentMethodStackOffset = 0;
// Register holding the ArtMethod* on entry (first integer argument register).
static constexpr Register kMethodRegisterArgument = RDI;
// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
// table version generates 7 instructions and num_entries literals. Compare/jump sequence
// generates less code/data with a small num_entries.
static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;

// Callee-saved core and floating-point registers of the x86-64 managed runtime ABI.
static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };

// C2 condition bit (bit 10) of the x87 FPU status word.
static constexpr int kC2ConditionMask = 0x400;
64
Vladimir Marko3232dbb2018-07-25 15:42:46 +010065static RegisterSet OneRegInReferenceOutSaveEverythingCallerSaves() {
66 // Custom calling convention: RAX serves as both input and output.
67 RegisterSet caller_saves = RegisterSet::Empty();
68 caller_saves.Add(Location::RegisterLocation(RAX));
69 return caller_saves;
70}
71
Roland Levillain7cbd27f2016-08-11 23:53:33 +010072// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
73#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -070074#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64PointerSize, x).Int32Value()
Nicolas Geoffraye5038322014-07-04 09:41:32 +010075
Andreas Gampe85b62f22015-09-09 13:15:38 -070076class NullCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffraye5038322014-07-04 09:41:32 +010077 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000078 explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye5038322014-07-04 09:41:32 +010079
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010080 void EmitNativeCode(CodeGenerator* codegen) override {
Roland Levillain0d5a2812015-11-13 10:07:31 +000081 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffraye5038322014-07-04 09:41:32 +010082 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +000083 if (instruction_->CanThrowIntoCatchBlock()) {
84 // Live registers will be restored in the catch block if caught.
85 SaveLiveRegisters(codegen, instruction_->GetLocations());
86 }
Serban Constantinescuba45db02016-07-12 22:53:02 +010087 x86_64_codegen->InvokeRuntime(kQuickThrowNullPointer,
Roland Levillain0d5a2812015-11-13 10:07:31 +000088 instruction_,
89 instruction_->GetDexPc(),
90 this);
Roland Levillain888d0672015-11-23 18:53:50 +000091 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
Nicolas Geoffraye5038322014-07-04 09:41:32 +010092 }
93
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010094 bool IsFatal() const override { return true; }
Alexandre Rames8158f282015-08-07 10:26:17 +010095
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010096 const char* GetDescription() const override { return "NullCheckSlowPathX86_64"; }
Alexandre Rames9931f312015-06-19 14:47:01 +010097
Nicolas Geoffraye5038322014-07-04 09:41:32 +010098 private:
Nicolas Geoffraye5038322014-07-04 09:41:32 +010099 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
100};
101
Andreas Gampe85b62f22015-09-09 13:15:38 -0700102class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
Calin Juravled0d48522014-11-04 16:40:20 +0000103 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000104 explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}
Calin Juravled0d48522014-11-04 16:40:20 +0000105
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100106 void EmitNativeCode(CodeGenerator* codegen) override {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000107 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Calin Juravled0d48522014-11-04 16:40:20 +0000108 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +0100109 x86_64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000110 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
Calin Juravled0d48522014-11-04 16:40:20 +0000111 }
112
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100113 bool IsFatal() const override { return true; }
Alexandre Rames8158f282015-08-07 10:26:17 +0100114
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100115 const char* GetDescription() const override { return "DivZeroCheckSlowPathX86_64"; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100116
Calin Juravled0d48522014-11-04 16:40:20 +0000117 private:
Calin Juravled0d48522014-11-04 16:40:20 +0000118 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
119};
120
// Slow path for Div/Rem when the divisor is -1. On x86-64, `idiv` of the
// minimum integer value by -1 would raise a divide error (#DE), so that case
// is diverted here: the quotient is the negated dividend and the remainder
// is zero.
class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
 public:
  // `reg` holds the dividend on entry and receives the result.
  DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, DataType::Type type, bool is_div)
      : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    __ Bind(GetEntryLabel());
    if (type_ == DataType::Type::kInt32) {
      if (is_div_) {
        // Quotient: x / -1 == -x (min value wraps back to itself).
        __ negl(cpu_reg_);
      } else {
        // Remainder: x % -1 == 0.
        __ xorl(cpu_reg_, cpu_reg_);
      }

    } else {
      DCHECK_EQ(DataType::Type::kInt64, type_);
      if (is_div_) {
        __ negq(cpu_reg_);
      } else {
        // A 32-bit xor suffices: on x86-64 a write to a 32-bit register
        // zero-extends into the full 64-bit register.
        __ xorl(cpu_reg_, cpu_reg_);
      }
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "DivRemMinusOneSlowPathX86_64"; }

 private:
  const CpuRegister cpu_reg_;   // Register holding the dividend/result.
  const DataType::Type type_;   // kInt32 or kInt64.
  const bool is_div_;           // True for division, false for remainder.
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
};
154
// Slow path calling the kQuickTestSuspend runtime entrypoint to honor a
// pending thread suspension request, then resuming execution either at the
// return label (back-edge style check) or at the given successor block.
class SuspendCheckSlowPathX86_64 : public SlowPathCode {
 public:
  // `successor` may be null; see EmitNativeCode for how the resume target is chosen.
  SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCode(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);  // Only saves full width XMM for SIMD.
    x86_64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, locations);  // Only restores full width XMM for SIMD.
    if (successor_ == nullptr) {
      // No explicit successor: jump back right after the suspend check.
      __ jmp(GetReturnLabel());
    } else {
      // Resume at the designated successor block.
      __ jmp(x86_64_codegen->GetLabelOf(successor_));
    }
  }

  // Only valid when no successor block was supplied.
  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const override { return "SuspendCheckSlowPathX86_64"; }

 private:
  HBasicBlock* const successor_;  // Resume block, or null to use return_label_.
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
};
192
// Slow path throwing an out-of-bounds error: it marshals the index and the
// (possibly memory-resident) array/string length into the runtime calling
// convention registers and calls kQuickThrowArrayBounds or
// kQuickThrowStringBounds.
class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // Are we using an array length from memory?
    HInstruction* array_length = instruction_->InputAt(1);
    Location length_loc = locations->InAt(1);
    InvokeRuntimeCallingConvention calling_convention;
    if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
      // Load the array length into our temporary.
      HArrayLength* length = array_length->AsArrayLength();
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(length);
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      // Stage the length in the second runtime argument register.
      length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
      // Check for conflicts with index.
      if (length_loc.Equals(locations->InAt(0))) {
        // We know we aren't using parameter 2.
        length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
      }
      __ movl(length_loc.AsRegister<CpuRegister>(), array_len);
      if (mirror::kUseStringCompression && length->IsStringLength()) {
        // Compressed strings store (length << 1 | flag); shift to get the length.
        __ shrl(length_loc.AsRegister<CpuRegister>(), Immediate(1));
      }
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        DataType::Type::kInt32,
        length_loc,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32);
    // String.charAt bounds failures use a dedicated entrypoint.
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    x86_64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  // The runtime call throws; control never returns to the fast path.
  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "BoundsCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};
252
// Slow path resolving a class (and/or running its static initializer) via
// the runtime. Used by both HLoadClass and HClinitCheck; `at` is the
// instruction the slow path is attached to.
class LoadClassSlowPathX86_64 : public SlowPathCode {
 public:
  LoadClassSlowPathX86_64(HLoadClass* cls, HInstruction* at)
      : SlowPathCode(at), cls_(cls) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    const uint32_t dex_pc = instruction_->GetDexPc();
    bool must_resolve_type = instruction_->IsLoadClass() && cls_->MustResolveTypeOnSlowPath();
    bool must_do_clinit = instruction_->IsClinitCheck() || cls_->MustGenerateClinitCheck();

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Custom calling convention: RAX serves as both input and output.
    if (must_resolve_type) {
      DCHECK(IsSameDexFile(cls_->GetDexFile(), x86_64_codegen->GetGraph()->GetDexFile()));
      dex::TypeIndex type_index = cls_->GetTypeIndex();
      __ movl(CpuRegister(RAX), Immediate(type_index.index_));
      if (cls_->NeedsAccessCheck()) {
        CheckEntrypointTypes<kQuickResolveTypeAndVerifyAccess, void*, uint32_t>();
        x86_64_codegen->InvokeRuntime(kQuickResolveTypeAndVerifyAccess, instruction_, dex_pc, this);
      } else {
        CheckEntrypointTypes<kQuickResolveType, void*, uint32_t>();
        x86_64_codegen->InvokeRuntime(kQuickResolveType, instruction_, dex_pc, this);
      }
      // If we also must_do_clinit, the resolved type is now in the correct register.
    } else {
      DCHECK(must_do_clinit);
      // The class is already resolved; move it into RAX for the clinit call.
      Location source = instruction_->IsLoadClass() ? out : locations->InAt(0);
      x86_64_codegen->Move(Location::RegisterLocation(RAX), source);
    }
    if (must_do_clinit) {
      x86_64_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, mirror::Class*>();
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
    }

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "LoadClassSlowPathX86_64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
};
313
// Slow path resolving a String literal through the kQuickResolveString
// runtime entrypoint.
class LoadStringSlowPathX86_64 : public SlowPathCode {
 public:
  explicit LoadStringSlowPathX86_64(HLoadString* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
    // Custom calling convention: RAX serves as both input and output.
    __ movl(CpuRegister(RAX), Immediate(string_index.index_));
    x86_64_codegen->InvokeRuntime(kQuickResolveString,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    // Copy the resolved string from RAX to the instruction's output location.
    x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
    RestoreLiveRegisters(codegen, locations);

    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "LoadStringSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
};
345
// Slow path for HInstanceOf / HCheckCast: calls the non-trivial runtime
// type-check entrypoints. For a fatal CheckCast the runtime call throws and
// does not return; otherwise the (InstanceOf) result is moved to the output
// and execution resumes at the exit label.
class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    if (kPoisonHeapReferences &&
        instruction_->IsCheckCast() &&
        instruction_->AsCheckCast()->GetTypeCheckKind() == TypeCheckKind::kInterfaceCheck) {
      // First, unpoison the `cls` reference that was poisoned for direct memory comparison.
      __ UnpoisonHeapReference(locations->InAt(1).AsRegister<CpuRegister>());
    }

    if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
      // Live registers are needed again on return, or restored by a catch block.
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kReference,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kReference);
    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        // InstanceOf returns its result in RAX; move it to the output location.
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const override { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const override { return is_fatal_; }

 private:
  // True when a failed check throws without returning (uncatchable here).
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};
408
// Slow path transferring control to the interpreter via the kQuickDeoptimize
// runtime entrypoint; the deoptimization kind is passed as the first argument.
class DeoptimizationSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    LocationSummary* locations = instruction_->GetLocations();
    SaveLiveRegisters(codegen, locations);
    InvokeRuntimeCallingConvention calling_convention;
    // Pass the DeoptimizationKind in the first argument register.
    x86_64_codegen->Load32BitValue(
        CpuRegister(calling_convention.GetRegisterAt(0)),
        static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
    // The runtime call does not return here; no register restore is emitted.
    x86_64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
  }

  const char* GetDescription() const override { return "DeoptimizationSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
};
432
// Slow path for reference array stores that need the runtime's type check:
// marshals (array, index, value) into the runtime calling convention and
// calls kQuickAputObject.
class ArraySetSlowPathX86_64 : public SlowPathCode {
 public:
  explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    // The three inputs may overlap the argument registers, so resolve them
    // as one parallel move.
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        DataType::Type::kReference,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        DataType::Type::kReference,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "ArraySetSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
};
473
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100474// Slow path marking an object reference `ref` during a read
475// barrier. The field `obj.field` in the object `obj` holding this
476// reference does not get updated by this slow path after marking (see
477// ReadBarrierMarkAndUpdateFieldSlowPathX86_64 below for that).
478//
479// This means that after the execution of this slow path, `ref` will
480// always be up-to-date, but `obj.field` may not; i.e., after the
481// flip, `ref` will be a to-space reference, but `obj.field` will
482// probably still be a from-space reference (unless it gets updated by
483// another thread, or if another thread installed another object
484// reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
 public:
  // `ref` is the register holding the reference to mark; if
  // `unpoison_ref_before_marking` is true, the reference is unpoisoned
  // before being passed to the mark entrypoint.
  ReadBarrierMarkSlowPathX86_64(HInstruction* instruction,
                                Location ref,
                                bool unpoison_ref_before_marking)
      : SlowPathCode(instruction),
        ref_(ref),
        unpoison_ref_before_marking_(unpoison_ref_before_marking) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const override { return "ReadBarrierMarkSlowPathX86_64"; }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
    Register ref_reg = ref_cpu_reg.AsRegister();
    DCHECK(locations->CanCall());
    // `ref` must not be a live register here: the mark entrypoint's
    // result is written back into the same register.
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // Only the instructions below are expected to emit a Baker read
    // barrier mark slow path.
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsPredicatedInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvoke() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_cpu_reg);
    }
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(ref_reg, RSP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in R0):
    //
    //   RDI <- ref
    //   RAX <- ReadBarrierMark(RDI)
    //   ref <- RAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    // There is one such entrypoint per core register, indexed by `ref_reg`.
    int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
};
557
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100558// Slow path marking an object reference `ref` during a read barrier,
559// and if needed, atomically updating the field `obj.field` in the
560// object `obj` holding this reference after marking (contrary to
561// ReadBarrierMarkSlowPathX86_64 above, which never tries to update
562// `obj.field`).
563//
564// This means that after the execution of this slow path, both `ref`
565// and `obj.field` will be up-to-date; i.e., after the flip, both will
566// hold the same to-space reference (unless another thread installed
567// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathX86_64 : public SlowPathCode {
 public:
  // `ref` is the register holding the reference to mark; `obj` holds the
  // object containing the field at `field_addr` (whose base must be `obj`).
  // `temp1` and `temp2` are scratch registers used for the CAS update;
  // if `unpoison_ref_before_marking` is true, `ref` is unpoisoned first.
  ReadBarrierMarkAndUpdateFieldSlowPathX86_64(HInstruction* instruction,
                                              Location ref,
                                              CpuRegister obj,
                                              const Address& field_addr,
                                              bool unpoison_ref_before_marking,
                                              CpuRegister temp1,
                                              CpuRegister temp2)
      : SlowPathCode(instruction),
        ref_(ref),
        obj_(obj),
        field_addr_(field_addr),
        unpoison_ref_before_marking_(unpoison_ref_before_marking),
        temp1_(temp1),
        temp2_(temp2) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const override {
    return "ReadBarrierMarkAndUpdateFieldSlowPathX86_64";
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
    Register ref_reg = ref_cpu_reg.AsRegister();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic and the
    // VarHandle compare-and-set/compare-and-exchange/get-and-update intrinsics.
    DCHECK((instruction_->IsInvoke() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    Intrinsics intrinsic = instruction_->AsInvoke()->GetIntrinsic();
    static constexpr auto kVarHandleCAS = mirror::VarHandle::AccessModeTemplate::kCompareAndSet;
    static constexpr auto kVarHandleCAX =
        mirror::VarHandle::AccessModeTemplate::kCompareAndExchange;
    static constexpr auto kVarHandleGAU = mirror::VarHandle::AccessModeTemplate::kGetAndUpdate;
    DCHECK(intrinsic == Intrinsics::kUnsafeCASObject ||
           mirror::VarHandle::GetAccessModeTemplateByIntrinsic(intrinsic) == kVarHandleCAS ||
           mirror::VarHandle::GetAccessModeTemplateByIntrinsic(intrinsic) == kVarHandleCAX ||
           mirror::VarHandle::GetAccessModeTemplateByIntrinsic(intrinsic) == kVarHandleGAU);

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_cpu_reg);
    }

    // Save the old (unpoisoned) reference.
    __ movl(temp1_, ref_cpu_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(ref_reg, RSP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in R0):
    //
    //   RDI <- ref
    //   RAX <- ReadBarrierMark(RDI)
    //   ref <- RAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*field_addr`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // LOCK CMPXCHGL instruction in the compare-and-set (CAS)
    // operation below would abort the CAS, leaving the field as-is.
    NearLabel done;
    __ cmpl(temp1_, ref_cpu_reg);
    __ j(kEqual, &done);

    // Update the holder's field atomically. This may fail if a
    // mutator updates it before us, but it's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.
    // This operation is implemented with a 32-bit LOCK CMPXCHGL
    // instruction, which requires the expected value (the old
    // reference) to be in EAX. Save RAX beforehand, and move the
    // expected value (stored in `temp1_`) into EAX.
    __ movq(temp2_, CpuRegister(RAX));
    __ movl(CpuRegister(RAX), temp1_);

    // Convenience aliases.
    CpuRegister base = obj_;
    CpuRegister expected = CpuRegister(RAX);
    CpuRegister value = ref_cpu_reg;

    bool base_equals_value = (base.AsRegister() == value.AsRegister());
    Register value_reg = ref_reg;
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // If `base` and `value` are the same register location, move
        // `value_reg` to a temporary register. This way, poisoning
        // `value_reg` won't invalidate `base`.
        value_reg = temp1_.AsRegister();
        __ movl(CpuRegister(value_reg), base);
      }

      // Check that the register allocator did not assign the location
      // of `expected` (RAX) to `value` nor to `base`, so that heap
      // poisoning (when enabled) works as intended below.
      // - If `value` were equal to `expected`, both references would
      //   be poisoned twice, meaning they would not be poisoned at
      //   all, as heap poisoning uses address negation.
      // - If `base` were equal to `expected`, poisoning `expected`
      //   would invalidate `base`.
      DCHECK_NE(value_reg, expected.AsRegister());
      DCHECK_NE(base.AsRegister(), expected.AsRegister());

      __ PoisonHeapReference(expected);
      __ PoisonHeapReference(CpuRegister(value_reg));
    }

    __ LockCmpxchgl(field_addr_, CpuRegister(value_reg));

    // If heap poisoning is enabled, we need to unpoison the values
    // that were poisoned earlier.
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // `value_reg` has been moved to a temporary register, no need
        // to unpoison it.
      } else {
        __ UnpoisonHeapReference(CpuRegister(value_reg));
      }
      // No need to unpoison `expected` (RAX), as it is overwritten below.
    }

    // Restore RAX.
    __ movq(CpuRegister(RAX), temp2_);

    __ Bind(&done);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const CpuRegister obj_;
  // The address of the marked reference field. The base of this address must be `obj_`.
  const Address field_addr_;

  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  // Scratch registers: `temp1_` holds the old reference across the mark
  // call (and the CAS expected value); `temp2_` preserves RAX.
  const CpuRegister temp1_;
  const CpuRegister temp2_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathX86_64);
};
737
Roland Levillain0d5a2812015-11-13 10:07:31 +0000738// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
 public:
  // `out` receives the to-space reference; `ref` is the reference that was
  // loaded; `obj` is the holder object; the field address is `obj + offset`
  // (plus a scaled `index` for array accesses, when `index` is valid).
  ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCode(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial
    // object has been overwritten by (or after) the heap object
    // reference load to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister reg_out = out_.AsRegister<CpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
    // Only the instructions below are expected to emit this slow path.
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsPredicatedInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvoke() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86_64::X86_64Assembler::shll and
          // art::x86_64::X86_64Assembler::AddImmediate below), but it
          // has not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
          __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the
        // scale factor (2) cannot overflow in practice, as the
        // runtime is unable to allocate object arrays with a size
        // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          DataType::Type::kReference,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          DataType::Type::kReference,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            DataType::Type::kInt32,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      // No index: pass the constant offset directly as the third argument.
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
    }
    x86_64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override {
    return "ReadBarrierForHeapReferenceSlowPathX86_64";
  }

 private:
  // Returns a caller-save core register different from `ref_` and `obj_`,
  // suitable as scratch space inside this slow path.
  CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
    size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<CpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86-64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
};
920
921// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 public:
  // `out` receives the result of the read barrier on the GC root `root`.
  ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    // Only GC root loads (class and string loads) are expected here.
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Pass the root as the single argument and call the runtime entrypoint;
    // the result comes back in RAX and is moved into `out_`.
    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "ReadBarrierForRootSlowPathX86_64"; }

 private:
  // The location receiving the to-space reference.
  const Location out_;
  // The location of the GC root to read.
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
};
962
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100963#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100964// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
965#define __ down_cast<X86_64Assembler*>(GetAssembler())-> // NOLINT
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100966
Roland Levillain4fa13f62015-07-06 18:11:54 +0100967inline Condition X86_64IntegerCondition(IfCondition cond) {
Dave Allison20dfc792014-06-16 20:44:29 -0700968 switch (cond) {
969 case kCondEQ: return kEqual;
970 case kCondNE: return kNotEqual;
971 case kCondLT: return kLess;
972 case kCondLE: return kLessEqual;
973 case kCondGT: return kGreater;
974 case kCondGE: return kGreaterEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700975 case kCondB: return kBelow;
976 case kCondBE: return kBelowEqual;
977 case kCondA: return kAbove;
978 case kCondAE: return kAboveEqual;
Dave Allison20dfc792014-06-16 20:44:29 -0700979 }
Roland Levillain4fa13f62015-07-06 18:11:54 +0100980 LOG(FATAL) << "Unreachable";
981 UNREACHABLE();
982}
983
Aart Bike9f37602015-10-09 11:15:55 -0700984// Maps FP condition to x86_64 name.
Roland Levillain4fa13f62015-07-06 18:11:54 +0100985inline Condition X86_64FPCondition(IfCondition cond) {
986 switch (cond) {
987 case kCondEQ: return kEqual;
988 case kCondNE: return kNotEqual;
989 case kCondLT: return kBelow;
990 case kCondLE: return kBelowEqual;
991 case kCondGT: return kAbove;
992 case kCondGE: return kAboveEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700993 default: break; // should not happen
Igor Murashkin2ffb7032017-11-08 13:35:21 -0800994 }
Roland Levillain4fa13f62015-07-06 18:11:54 +0100995 LOG(FATAL) << "Unreachable";
996 UNREACHABLE();
Dave Allison20dfc792014-06-16 20:44:29 -0700997}
998
Vladimir Marko86c87522020-05-11 16:55:55 +0100999void CodeGeneratorX86_64::BlockNonVolatileXmmRegisters(LocationSummary* locations) {
1000 // We have to ensure that the native code we call directly (such as @CriticalNative
1001 // or some intrinsic helpers, say Math.sin()) doesn't clobber the XMM registers
1002 // which are non-volatile for ART, but volatile for Native calls. This will ensure
1003 // that they are saved in the prologue and properly restored.
1004 for (FloatRegister fp_reg : non_volatile_xmm_regs) {
1005 locations->AddTemp(Location::FpuRegisterLocation(fp_reg));
1006 }
1007}
1008
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    ArtMethod* method ATTRIBUTE_UNUSED) {
  // On x86-64 the desired dispatch info is returned unchanged: every
  // dispatch kind requested by the compiler driver is supported as-is.
  return desired_dispatch_info;
}
1014
// Loads the ArtMethod* (or a PC-relative address for it) into `temp`
// according to `load_kind`, recording a linker patch where needed.
// Load kinds that are handled by the caller (e.g. kStringInit, kRecursive)
// must not reach this function.
void CodeGeneratorX86_64::LoadMethod(MethodLoadKind load_kind, Location temp, HInvoke* invoke) {
  switch (load_kind) {
    case MethodLoadKind::kBootImageLinkTimePcRelative:
      // PC-relative address of the method in the boot image; the placeholder
      // offset is fixed up by the linker via the recorded patch.
      DCHECK(GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension());
      __ leal(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kPlaceholder32BitOffset, /* no_rip= */ false));
      RecordBootImageMethodPatch(invoke);
      break;
    case MethodLoadKind::kBootImageRelRo: {
      // Note: Boot image is in the low 4GiB and the entry is 32-bit, so emit a 32-bit load.
      __ movl(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kPlaceholder32BitOffset, /* no_rip= */ false));
      RecordBootImageRelRoPatch(GetBootImageOffset(invoke));
      break;
    }
    case MethodLoadKind::kBssEntry: {
      // Load the method from its .bss entry, resolved at runtime.
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kPlaceholder32BitOffset, /* no_rip= */ false));
      RecordMethodBssEntryPatch(invoke);
      // No need for memory fence, thanks to the x86-64 memory model.
      break;
    }
    case MethodLoadKind::kJitDirectAddress: {
      // JIT compilation: the resolved method's address is known and embedded
      // directly in the generated code.
      Load64BitValue(temp.AsRegister<CpuRegister>(),
                     reinterpret_cast<int64_t>(invoke->GetResolvedMethod()));
      break;
    }
    case MethodLoadKind::kRuntimeCall: {
      // Test situation, don't do anything.
      break;
    }
    default: {
      LOG(FATAL) << "Load kind should have already been handled " << load_kind;
      UNREACHABLE();
    }
  }
}
1052
void CodeGeneratorX86_64::GenerateStaticOrDirectCall(
    HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
  // Emits the callee load (first switch) and the call itself (second switch)
  // for a static or direct invoke.
  // All registers are assumed to be correctly set up.

  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kX86_64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ gs()->movq(temp.AsRegister<CpuRegister>(), Address::Absolute(offset, /* no_rip= */ true));
      break;
    }
    case MethodLoadKind::kRecursive: {
      // Recursive call: the callee is the current method, already available in
      // one of the invoke's input locations.
      callee_method = invoke->GetLocations()->InAt(invoke->GetCurrentMethodIndex());
      break;
    }
    case MethodLoadKind::kRuntimeCall: {
      GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
      return;  // No code pointer retrieval; the runtime performs the call directly.
    }
    case MethodLoadKind::kBootImageLinkTimePcRelative:
      // For kCallCriticalNative we skip loading the method and do the call directly.
      if (invoke->GetCodePtrLocation() == CodePtrLocation::kCallCriticalNative) {
        break;
      }
      FALLTHROUGH_INTENDED;
    default: {
      // All remaining load kinds are handled by the shared LoadMethod() helper.
      LoadMethod(invoke->GetMethodLoadKind(), temp, invoke);
      break;
    }
  }

  switch (invoke->GetCodePtrLocation()) {
    case CodePtrLocation::kCallSelf:
      // Direct recursive call to this method's own frame entry.
      __ call(&frame_entry_label_);
      RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
      break;
    case CodePtrLocation::kCallCriticalNative: {
      // @CriticalNative call: set up the native stack frame, call the JNI
      // entrypoint directly, then normalize the return value.
      size_t out_frame_size =
          PrepareCriticalNativeCall<CriticalNativeCallingConventionVisitorX86_64,
                                    kNativeStackAlignment,
                                    GetCriticalNativeDirectCallFrameSize>(invoke);
      if (invoke->GetMethodLoadKind() == MethodLoadKind::kBootImageLinkTimePcRelative) {
        DCHECK(GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension());
        // Call through a linker-patched placeholder resolved at link time.
        __ call(Address::Absolute(kPlaceholder32BitOffset, /* no_rip= */ false));
        RecordBootImageJniEntrypointPatch(invoke);
      } else {
        // (callee_method + offset_of_jni_entry_point)()
        __ call(Address(callee_method.AsRegister<CpuRegister>(),
                        ArtMethod::EntryPointFromJniOffset(kX86_64PointerSize).SizeValue()));
      }
      RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
      // Zero-/sign-extend the result when needed due to native and managed ABI mismatch.
      switch (invoke->GetType()) {
        case DataType::Type::kBool:
          __ movzxb(CpuRegister(RAX), CpuRegister(RAX));
          break;
        case DataType::Type::kInt8:
          __ movsxb(CpuRegister(RAX), CpuRegister(RAX));
          break;
        case DataType::Type::kUint16:
          __ movzxw(CpuRegister(RAX), CpuRegister(RAX));
          break;
        case DataType::Type::kInt16:
          __ movsxw(CpuRegister(RAX), CpuRegister(RAX));
          break;
        case DataType::Type::kInt32:
        case DataType::Type::kInt64:
        case DataType::Type::kFloat32:
        case DataType::Type::kFloat64:
        case DataType::Type::kVoid:
          // These types need no normalization on x86-64.
          break;
        default:
          DCHECK(false) << invoke->GetType();
          break;
      }
      if (out_frame_size != 0u) {
        // Pop the temporary native out-args frame set up above.
        DecreaseFrame(out_frame_size);
      }
      break;
    }
    case CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64PointerSize).SizeValue()));
      RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
      break;
  }

  DCHECK(!IsLeafMethod());
}
1146
void CodeGeneratorX86_64::GenerateVirtualCall(
    HInvokeVirtual* invoke, Location temp_in, SlowPathCode* slow_path) {
  // Emits a virtual dispatch: load the receiver's class, fetch the target
  // ArtMethod from the embedded vtable, and call its quick entrypoint.
  CpuRegister temp = temp_in.AsRegister<CpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // /* HeapReference<Class> */ temp = receiver->klass_
  // This load also serves as the implicit null check on the receiver.
  __ movl(temp, Address(CpuRegister(receiver), class_offset));
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);

  // When collecting baseline profiles, update the inline cache with the
  // receiver's class before dispatching.
  MaybeGenerateInlineCacheCheck(invoke, temp);

  // temp = temp->GetMethodAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86_64PointerSize).SizeValue()));
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
}
1182
Vladimir Marko6fd16062018-06-26 11:02:04 +01001183void CodeGeneratorX86_64::RecordBootImageIntrinsicPatch(uint32_t intrinsic_data) {
Vladimir Marko2d06e022019-07-08 15:45:19 +01001184 boot_image_other_patches_.emplace_back(/* target_dex_file= */ nullptr, intrinsic_data);
1185 __ Bind(&boot_image_other_patches_.back().label);
Vladimir Marko6fd16062018-06-26 11:02:04 +01001186}
1187
Vladimir Markob066d432018-01-03 13:14:37 +00001188void CodeGeneratorX86_64::RecordBootImageRelRoPatch(uint32_t boot_image_offset) {
Vladimir Marko2d06e022019-07-08 15:45:19 +01001189 boot_image_other_patches_.emplace_back(/* target_dex_file= */ nullptr, boot_image_offset);
1190 __ Bind(&boot_image_other_patches_.back().label);
Vladimir Markob066d432018-01-03 13:14:37 +00001191}
1192
Nicolas Geoffray8d34a182020-09-16 09:46:58 +01001193void CodeGeneratorX86_64::RecordBootImageMethodPatch(HInvoke* invoke) {
Nicolas Geoffraye6c0f2a2020-09-07 08:30:52 +01001194 boot_image_method_patches_.emplace_back(invoke->GetResolvedMethodReference().dex_file,
1195 invoke->GetResolvedMethodReference().index);
Vladimir Marko65979462017-05-19 17:25:12 +01001196 __ Bind(&boot_image_method_patches_.back().label);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001197}
1198
Nicolas Geoffray8d34a182020-09-16 09:46:58 +01001199void CodeGeneratorX86_64::RecordMethodBssEntryPatch(HInvoke* invoke) {
Nicolas Geoffraye6c0f2a2020-09-07 08:30:52 +01001200 DCHECK(IsSameDexFile(GetGraph()->GetDexFile(), *invoke->GetMethodReference().dex_file));
1201 method_bss_entry_patches_.emplace_back(invoke->GetMethodReference().dex_file,
1202 invoke->GetMethodReference().index);
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001203 __ Bind(&method_bss_entry_patches_.back().label);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001204}
1205
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001206void CodeGeneratorX86_64::RecordBootImageTypePatch(HLoadClass* load_class) {
1207 boot_image_type_patches_.emplace_back(
1208 &load_class->GetDexFile(), load_class->GetTypeIndex().index_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001209 __ Bind(&boot_image_type_patches_.back().label);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001210}
1211
Vladimir Marko6bec91c2017-01-09 15:03:12 +00001212Label* CodeGeneratorX86_64::NewTypeBssEntryPatch(HLoadClass* load_class) {
Vladimir Marko8f63f102020-09-28 12:10:28 +01001213 ArenaDeque<PatchInfo<Label>>* patches = nullptr;
1214 switch (load_class->GetLoadKind()) {
1215 case HLoadClass::LoadKind::kBssEntry:
1216 patches = &type_bss_entry_patches_;
1217 break;
1218 case HLoadClass::LoadKind::kBssEntryPublic:
1219 patches = &public_type_bss_entry_patches_;
1220 break;
1221 case HLoadClass::LoadKind::kBssEntryPackage:
1222 patches = &package_type_bss_entry_patches_;
1223 break;
1224 default:
1225 LOG(FATAL) << "Unexpected load kind: " << load_class->GetLoadKind();
1226 UNREACHABLE();
1227 }
1228 patches->emplace_back(&load_class->GetDexFile(), load_class->GetTypeIndex().index_);
1229 return &patches->back().label;
Vladimir Marko6bec91c2017-01-09 15:03:12 +00001230}
1231
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001232void CodeGeneratorX86_64::RecordBootImageStringPatch(HLoadString* load_string) {
1233 boot_image_string_patches_.emplace_back(
1234 &load_string->GetDexFile(), load_string->GetStringIndex().index_);
1235 __ Bind(&boot_image_string_patches_.back().label);
Vladimir Marko65979462017-05-19 17:25:12 +01001236}
1237
Vladimir Markoaad75c62016-10-03 08:46:48 +00001238Label* CodeGeneratorX86_64::NewStringBssEntryPatch(HLoadString* load_string) {
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001239 string_bss_entry_patches_.emplace_back(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001240 &load_string->GetDexFile(), load_string->GetStringIndex().index_);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001241 return &string_bss_entry_patches_.back().label;
Vladimir Markoaad75c62016-10-03 08:46:48 +00001242}
1243
Vladimir Markoeb9eb002020-10-02 13:54:19 +01001244void CodeGeneratorX86_64::RecordBootImageJniEntrypointPatch(HInvokeStaticOrDirect* invoke) {
1245 boot_image_jni_entrypoint_patches_.emplace_back(invoke->GetResolvedMethodReference().dex_file,
1246 invoke->GetResolvedMethodReference().index);
1247 __ Bind(&boot_image_jni_entrypoint_patches_.back().label);
1248}
1249
void CodeGeneratorX86_64::LoadBootImageAddress(CpuRegister reg, uint32_t boot_image_reference) {
  // Loads a boot image address into `reg`, using a strategy that depends on
  // the compilation mode.
  if (GetCompilerOptions().IsBootImage()) {
    // Boot image compilation: emit a PC-relative LEA against a placeholder,
    // fixed up at link time via an intrinsic patch.
    __ leal(reg,
            Address::Absolute(CodeGeneratorX86_64::kPlaceholder32BitOffset, /* no_rip= */ false));
    RecordBootImageIntrinsicPatch(boot_image_reference);
  } else if (GetCompilerOptions().GetCompilePic()) {
    // PIC app compilation: load the address from a .data.bimg.rel.ro slot,
    // fixed up at link time via a rel-ro patch.
    __ movl(reg,
            Address::Absolute(CodeGeneratorX86_64::kPlaceholder32BitOffset, /* no_rip= */ false));
    RecordBootImageRelRoPatch(boot_image_reference);
  } else {
    // JIT compilation: the boot image is mapped, so embed the absolute
    // address as an immediate.
    DCHECK(GetCompilerOptions().IsJitCompiler());
    gc::Heap* heap = Runtime::Current()->GetHeap();
    DCHECK(!heap->GetBootImageSpaces().empty());
    const uint8_t* address = heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference;
    __ movl(reg, Immediate(dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(address))));
  }
}
1267
void CodeGeneratorX86_64::LoadIntrinsicDeclaringClass(CpuRegister reg, HInvoke* invoke) {
  // Loads the declaring class of the intrinsic's resolved method into `reg`.
  DCHECK_NE(invoke->GetIntrinsic(), Intrinsics::kNone);
  if (GetCompilerOptions().IsBootImage()) {
    // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
    __ leal(reg,
            Address::Absolute(CodeGeneratorX86_64::kPlaceholder32BitOffset, /* no_rip= */ false));
    MethodReference target_method = invoke->GetResolvedMethodReference();
    dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
    boot_image_type_patches_.emplace_back(target_method.dex_file, type_idx.index_);
    __ Bind(&boot_image_type_patches_.back().label);
  } else {
    // Outside boot image compilation the class is already in the boot image;
    // load it through its boot image offset.
    uint32_t boot_image_offset = GetBootImageOffsetOfIntrinsicDeclaringClass(invoke);
    LoadBootImageAddress(reg, boot_image_offset);
  }
}
1283
// The label points to the end of the "movl" or another instruction but the literal offset
// for method patch needs to point to the embedded constant which occupies the last 4 bytes.
constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
1287
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01001288template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
Vladimir Markoaad75c62016-10-03 08:46:48 +00001289inline void CodeGeneratorX86_64::EmitPcRelativeLinkerPatches(
1290 const ArenaDeque<PatchInfo<Label>>& infos,
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01001291 ArenaVector<linker::LinkerPatch>* linker_patches) {
Vladimir Markoaad75c62016-10-03 08:46:48 +00001292 for (const PatchInfo<Label>& info : infos) {
1293 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
1294 linker_patches->push_back(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001295 Factory(literal_offset, info.target_dex_file, info.label.Position(), info.offset_or_index));
Vladimir Markoaad75c62016-10-03 08:46:48 +00001296 }
1297}
1298
Vladimir Marko6fd16062018-06-26 11:02:04 +01001299template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
1300linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
1301 const DexFile* target_dex_file,
1302 uint32_t pc_insn_offset,
1303 uint32_t boot_image_offset) {
1304 DCHECK(target_dex_file == nullptr); // Unused for these patches, should be null.
1305 return Factory(literal_offset, pc_insn_offset, boot_image_offset);
Vladimir Markob066d432018-01-03 13:14:37 +00001306}
1307
void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
  // Collects every patch recorded during code generation into `linker_patches`.
  DCHECK(linker_patches->empty());
  // Reserve space up-front; the final DCHECK_EQ verifies the accounting.
  size_t size =
      boot_image_method_patches_.size() +
      method_bss_entry_patches_.size() +
      boot_image_type_patches_.size() +
      type_bss_entry_patches_.size() +
      public_type_bss_entry_patches_.size() +
      package_type_bss_entry_patches_.size() +
      boot_image_string_patches_.size() +
      string_bss_entry_patches_.size() +
      boot_image_jni_entrypoint_patches_.size() +
      boot_image_other_patches_.size();
  linker_patches->reserve(size);
  if (GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension()) {
    // Boot image (extension) compilation: emit direct relative patches.
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
        boot_image_method_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
        boot_image_type_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
        boot_image_string_patches_, linker_patches);
  } else {
    // App compilation must not have recorded boot image patches.
    DCHECK(boot_image_method_patches_.empty());
    DCHECK(boot_image_type_patches_.empty());
    DCHECK(boot_image_string_patches_.empty());
  }
  // `boot_image_other_patches_` holds intrinsic references when compiling the
  // boot image, and .data.bimg.rel.ro entries otherwise.
  if (GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
        boot_image_other_patches_, linker_patches);
  } else {
    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
        boot_image_other_patches_, linker_patches);
  }
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
      method_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
      type_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::PublicTypeBssEntryPatch>(
      public_type_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::PackageTypeBssEntryPatch>(
      package_type_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
      string_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeJniEntrypointPatch>(
      boot_image_jni_entrypoint_patches_, linker_patches);
  DCHECK_EQ(size, linker_patches->size());
}
1355
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001356void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001357 stream << Register(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001358}
1359
1360void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001361 stream << FloatRegister(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001362}
1363
Vladimir Markoa0431112018-06-25 09:32:54 +01001364const X86_64InstructionSetFeatures& CodeGeneratorX86_64::GetInstructionSetFeatures() const {
1365 return *GetCompilerOptions().GetInstructionSetFeatures()->AsX86_64InstructionSetFeatures();
1366}
1367
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001368size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1369 __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
1370 return kX86_64WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001371}
1372
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001373size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1374 __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
1375 return kX86_64WordSize;
1376}
1377
1378size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Aart Bikb13c65b2017-03-21 20:14:07 -07001379 if (GetGraph()->HasSIMD()) {
Aart Bik5576f372017-03-23 16:17:37 -07001380 __ movups(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
Aart Bikb13c65b2017-03-21 20:14:07 -07001381 } else {
1382 __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
1383 }
Artem Serov6a0b6572019-07-26 20:38:37 +01001384 return GetSlowPathFPWidth();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001385}
1386
1387size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Aart Bikb13c65b2017-03-21 20:14:07 -07001388 if (GetGraph()->HasSIMD()) {
Aart Bik5576f372017-03-23 16:17:37 -07001389 __ movups(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
Aart Bikb13c65b2017-03-21 20:14:07 -07001390 } else {
1391 __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
1392 }
Artem Serov6a0b6572019-07-26 20:38:37 +01001393 return GetSlowPathFPWidth();
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001394}
1395
void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  // Calls the given quick entrypoint and records a stack map if the
  // entrypoint requires one.
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  GenerateInvokeRuntime(GetThreadOffset<kX86_64PointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
1406
void CodeGeneratorX86_64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                              HInstruction* instruction,
                                                              SlowPathCode* slow_path) {
  // Calls a runtime entrypoint by raw thread offset without emitting a stack
  // map; only valid for entrypoints that cannot trigger stack walks.
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}
1413
void CodeGeneratorX86_64::GenerateInvokeRuntime(int32_t entry_point_offset) {
  // Indirect call through the gs-based Thread entrypoint table.
  __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip= */ true));
}
1417
// x86-64 has no core register pairs: 64-bit values fit in a single register.
static constexpr int kNumberOfCpuRegisterPairs = 0;
// Use a fake return address register to mimic Quick.
static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCpuRegisters,
                    kNumberOfFloatRegisters,
                    kNumberOfCpuRegisterPairs,
                    // Core callee-saves plus the fake return address register
                    // are always treated as blocked/preserved.
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves))
                        | (1 << kFakeReturnRegister),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetAllocator(), this),
      assembler_(graph->GetAllocator()),
      constant_area_start_(0),
      // All patch lists are arena-allocated alongside the graph.
      boot_image_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      public_type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      package_type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_jni_entrypoint_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_other_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      fixups_to_jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
  AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001456
// Instruction visitor that emits x86-64 code for each HInstruction.
InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
                                                               CodeGeneratorX86_64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}
1462
David Brazdil58282f42016-01-14 12:45:10 +00001463void CodeGeneratorX86_64::SetupBlockedRegisters() const {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001464 // Stack register is always reserved.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001465 blocked_core_registers_[RSP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001466
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001467 // Block the register used as TMP.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001468 blocked_core_registers_[TMP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001469}
1470
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001471static dwarf::Reg DWARFReg(Register reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001472 return dwarf::Reg::X86_64Core(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001473}
David Srbecky9d8606d2015-04-12 09:35:32 +01001474
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001475static dwarf::Reg DWARFReg(FloatRegister reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001476 return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001477}
1478
void CodeGeneratorX86_64::MaybeIncrementHotness(bool is_frame_entry) {
  // Emits the method hotness counter update and, for baseline JIT code, the
  // ProfilingInfo counter check that can tier up to optimized compilation.
  if (GetCompilerOptions().CountHotnessInCompiledCode()) {
    NearLabel overflow;
    Register method = kMethodRegisterArgument;
    if (!is_frame_entry) {
      // Not at the frame entry: reload the ArtMethod* from the stack into TMP.
      CHECK(RequiresCurrentMethod());
      method = TMP;
      __ movq(CpuRegister(method), Address(CpuRegister(RSP), kCurrentMethodStackOffset));
    }
    // Saturating increment: skip the add once the counter hits MaxCounter().
    __ cmpw(Address(CpuRegister(method), ArtMethod::HotnessCountOffset().Int32Value()),
            Immediate(ArtMethod::MaxCounter()));
    __ j(kEqual, &overflow);
    __ addw(Address(CpuRegister(method), ArtMethod::HotnessCountOffset().Int32Value()),
            Immediate(1));
    __ Bind(&overflow);
  }

  if (GetGraph()->IsCompilingBaseline() && !Runtime::Current()->IsAotCompiler()) {
    ScopedProfilingInfoUse spiu(
        Runtime::Current()->GetJit(), GetGraph()->GetArtMethod(), Thread::Current());
    ProfilingInfo* info = spiu.GetProfilingInfo();
    if (info != nullptr) {
      uint64_t address = reinterpret_cast64<uint64_t>(info);
      NearLabel done;
      // Increment and mask the baseline counter; when the masked value wraps
      // to zero, call the compile-optimized entrypoint.
      __ movq(CpuRegister(TMP), Immediate(address));
      __ addw(Address(CpuRegister(TMP), ProfilingInfo::BaselineHotnessCountOffset().Int32Value()),
              Immediate(1));
      __ andw(Address(CpuRegister(TMP), ProfilingInfo::BaselineHotnessCountOffset().Int32Value()),
              Immediate(interpreter::kTieredHotnessMask));
      __ j(kNotZero, &done);
      if (HasEmptyFrame()) {
        CHECK(is_frame_entry);
        // Frame alignment, and the stub expects the method on the stack.
        __ pushq(CpuRegister(RDI));
        __ cfi().AdjustCFAOffset(kX86_64WordSize);
        __ cfi().RelOffset(DWARFReg(RDI), 0);
      } else if (!RequiresCurrentMethod()) {
        CHECK(is_frame_entry);
        // Make sure the method is stored where the stub expects it.
        __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset), CpuRegister(RDI));
      }
      GenerateInvokeRuntime(
          GetThreadOffset<kX86_64PointerSize>(kQuickCompileOptimized).Int32Value());
      if (HasEmptyFrame()) {
        // Undo the alignment push above.
        __ popq(CpuRegister(RDI));
        __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
        __ cfi().Restore(DWARFReg(RDI));
      }
      __ Bind(&done);
    }
  }
}
1530
void CodeGeneratorX86_64::GenerateFrameEntry() {
  // Emits the method prologue: stack overflow check, callee-save spills,
  // frame allocation, current-method store, and hotness update.
  __ cfi().SetCurrentCFAOffset(kX86_64WordSize);  // return address
  __ Bind(&frame_entry_label_);
  bool skip_overflow_check = IsLeafMethod()
      && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());


  if (!skip_overflow_check) {
    // Implicit stack overflow check: touch below the stack limit so a fault
    // is raised (and mapped back to this method via the recorded pc) if the
    // guard page is hit.
    size_t reserved_bytes = GetStackOverflowReservedBytes(InstructionSet::kX86_64);
    __ testq(CpuRegister(RAX), Address(CpuRegister(RSP), -static_cast<int32_t>(reserved_bytes)));
    RecordPcInfo(nullptr, 0);
  }

  if (!HasEmptyFrame()) {
    // Push core callee-saves in reverse order, tracking CFI state.
    for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
      Register reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        __ pushq(CpuRegister(reg));
        __ cfi().AdjustCFAOffset(kX86_64WordSize);
        __ cfi().RelOffset(DWARFReg(reg), 0);
      }
    }

    // Allocate the remainder of the frame in one stack adjustment.
    int adjust = GetFrameSize() - GetCoreSpillSize();
    IncreaseFrame(adjust);
    uint32_t xmm_spill_location = GetFpuSpillStart();
    size_t xmm_spill_slot_size = GetCalleePreservedFPWidth();

    // Spill FP callee-saves into their dedicated slots.
    for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
      if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
        int offset = xmm_spill_location + (xmm_spill_slot_size * i);
        __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
        __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
      }
    }

    // Save the current method if we need it. Note that we do not
    // do this in HCurrentMethod, as the instruction might have been removed
    // in the SSA graph.
    if (RequiresCurrentMethod()) {
      CHECK(!HasEmptyFrame());
      __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
              CpuRegister(kMethodRegisterArgument));
    }

    if (GetGraph()->HasShouldDeoptimizeFlag()) {
      CHECK(!HasEmptyFrame());
      // Initialize should_deoptimize flag to 0.
      __ movl(Address(CpuRegister(RSP), GetStackOffsetOfShouldDeoptimizeFlag()), Immediate(0));
    }
  }

  MaybeIncrementHotness(/* is_frame_entry= */ true);
}
1586
void CodeGeneratorX86_64::GenerateFrameExit() {
  // Emits the method epilogue, mirroring GenerateFrameEntry(): restore FP
  // callee-saves, release the frame, pop core callee-saves, and return.
  // CFI state is remembered/restored so following code keeps prologue CFI.
  __ cfi().RememberState();
  if (!HasEmptyFrame()) {
    uint32_t xmm_spill_location = GetFpuSpillStart();
    size_t xmm_spill_slot_size = GetCalleePreservedFPWidth();
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
        int offset = xmm_spill_location + (xmm_spill_slot_size * i);
        __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
        __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
      }
    }

    int adjust = GetFrameSize() - GetCoreSpillSize();
    DecreaseFrame(adjust);

    // Pop core callee-saves in the reverse order of the prologue's pushes.
    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
      Register reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        __ popq(CpuRegister(reg));
        __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
        __ cfi().Restore(DWARFReg(reg));
      }
    }
  }
  __ ret();
  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1616
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001617void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
1618 __ Bind(GetLabelOf(block));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001619}
1620
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001621void CodeGeneratorX86_64::Move(Location destination, Location source) {
1622 if (source.Equals(destination)) {
1623 return;
1624 }
1625 if (destination.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001626 CpuRegister dest = destination.AsRegister<CpuRegister>();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001627 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001628 __ movq(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001629 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001630 __ movd(dest, source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001631 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001632 __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
1633 } else if (source.IsConstant()) {
1634 HConstant* constant = source.GetConstant();
1635 if (constant->IsLongConstant()) {
1636 Load64BitValue(dest, constant->AsLongConstant()->GetValue());
Ulya Trafimovich4f388e32021-06-28 12:39:14 +01001637 } else if (constant->IsDoubleConstant()) {
1638 Load64BitValue(dest, GetInt64ValueOf(constant));
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001639 } else {
1640 Load32BitValue(dest, GetInt32ValueOf(constant));
1641 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001642 } else {
1643 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001644 __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001645 }
1646 } else if (destination.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001647 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001648 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001649 __ movd(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001650 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001651 __ movaps(dest, source.AsFpuRegister<XmmRegister>());
1652 } else if (source.IsConstant()) {
1653 HConstant* constant = source.GetConstant();
1654 int64_t value = CodeGenerator::GetInt64ValueOf(constant);
1655 if (constant->IsFloatConstant()) {
1656 Load32BitValue(dest, static_cast<int32_t>(value));
1657 } else {
1658 Load64BitValue(dest, value);
1659 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001660 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001661 __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001662 } else {
1663 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001664 __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001665 }
1666 } else if (destination.IsStackSlot()) {
1667 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001668 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001669 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001670 } else if (source.IsFpuRegister()) {
1671 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001672 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001673 } else if (source.IsConstant()) {
1674 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001675 int32_t value = GetInt32ValueOf(constant);
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001676 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001677 } else {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001678 DCHECK(source.IsStackSlot()) << source;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001679 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1680 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001681 }
1682 } else {
1683 DCHECK(destination.IsDoubleStackSlot());
1684 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001685 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001686 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001687 } else if (source.IsFpuRegister()) {
1688 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001689 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001690 } else if (source.IsConstant()) {
1691 HConstant* constant = source.GetConstant();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001692 DCHECK(constant->IsLongConstant() || constant->IsDoubleConstant());
1693 int64_t value = GetInt64ValueOf(constant);
Mark Mendellcfa410b2015-05-25 16:02:44 -04001694 Store64BitValueToStack(destination, value);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001695 } else {
1696 DCHECK(source.IsDoubleStackSlot());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001697 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1698 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001699 }
1700 }
1701}
1702
// Loads a non-reference value of `type` from `src` into `dst`, selecting the
// x86-64 load with the matching width and sign/zero extension. References are
// excluded because they may need read-barrier handling elsewhere.
void CodeGeneratorX86_64::LoadFromMemoryNoReference(DataType::Type type,
                                                    Location dst,
                                                    Address src) {
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
      __ movzxb(dst.AsRegister<CpuRegister>(), src);  // zero-extend byte
      break;
    case DataType::Type::kInt8:
      __ movsxb(dst.AsRegister<CpuRegister>(), src);  // sign-extend byte
      break;
    case DataType::Type::kUint16:
      __ movzxw(dst.AsRegister<CpuRegister>(), src);  // zero-extend word
      break;
    case DataType::Type::kInt16:
      __ movsxw(dst.AsRegister<CpuRegister>(), src);  // sign-extend word
      break;
    case DataType::Type::kInt32:
    case DataType::Type::kUint32:
      __ movl(dst.AsRegister<CpuRegister>(), src);
      break;
    case DataType::Type::kInt64:
    case DataType::Type::kUint64:
      __ movq(dst.AsRegister<CpuRegister>(), src);
      break;
    case DataType::Type::kFloat32:
      __ movss(dst.AsFpuRegister<XmmRegister>(), src);
      break;
    case DataType::Type::kFloat64:
      __ movsd(dst.AsFpuRegister<XmmRegister>(), src);
      break;
    case DataType::Type::kVoid:
    case DataType::Type::kReference:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }
}
1740
Calin Juravle175dc732015-08-25 15:42:32 +01001741void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
1742 DCHECK(location.IsRegister());
1743 Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
1744}
1745
// Location-to-location move required by the CodeGenerator interface; on
// x86-64 the destination type is not needed because Move() derives the move
// width from the location kinds.
void CodeGeneratorX86_64::MoveLocation(
    Location dst, Location src, DataType::Type dst_type ATTRIBUTE_UNUSED) {
  Move(dst, src);
}
1750
1751void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1752 if (location.IsRegister()) {
1753 locations->AddTemp(location);
1754 } else {
1755 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1756 }
1757}
1758
David Brazdilfc6a86a2015-06-26 10:33:45 +00001759void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Aart Bika8b8e9b2018-01-09 11:01:02 -08001760 if (successor->IsExitBlock()) {
1761 DCHECK(got->GetPrevious()->AlwaysThrows());
1762 return; // no code needed
1763 }
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001764
1765 HBasicBlock* block = got->GetBlock();
1766 HInstruction* previous = got->GetPrevious();
1767
1768 HLoopInformation* info = block->GetLoopInformation();
David Brazdil46e2a392015-03-16 17:31:52 +00001769 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffraya59af8a2019-11-27 17:42:32 +00001770 codegen_->MaybeIncrementHotness(/* is_frame_entry= */ false);
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001771 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1772 return;
1773 }
1774
1775 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1776 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1777 }
1778 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001779 __ jmp(codegen_->GetLabelOf(successor));
1780 }
1781}
1782
// HGoto consumes no values and produces none, so it needs no LocationSummary.
void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
1786
// Code generation for HGoto: delegate to the shared goto handling.
void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}
1790
// HTryBoundary consumes no values and produces none, so no LocationSummary.
void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}
1794
1795void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1796 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1797 if (!successor->IsExitBlock()) {
1798 HandleGoto(try_boundary, successor);
1799 }
1800}
1801
// HExit consumes no values and produces none, so no LocationSummary.
void LocationsBuilderX86_64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
1805
// HExit generates no code; control never falls through to the exit block.
void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
1808
// Emits the conditional jumps for a floating-point comparison whose flags
// were set by a preceding ucomiss/ucomisd. NaN makes the comparison
// unordered, so an explicit unordered jump is emitted first, targeting
// whichever side the condition defines for NaN.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
                                                     LabelType* true_label,
                                                     LabelType* false_label) {
  if (cond->IsFPConditionTrueIfNaN()) {
    __ j(kUnordered, true_label);
  } else if (cond->IsFPConditionFalseIfNaN()) {
    __ j(kUnordered, false_label);
  }
  __ j(X86_64FPCondition(cond->GetCondition()), true_label);
}
1820
// Emits the comparison for `condition`, leaving the result in EFLAGS for a
// subsequent setcc/jcc/cmov. Integer and long comparisons go through the
// shared helpers; FP comparisons use ucomiss/ucomisd with register, constant
// or stack-slot right-hand sides.
void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
  LocationSummary* locations = condition->GetLocations();

  Location left = locations->InAt(0);
  Location right = locations->InAt(1);
  DataType::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kReference: {
      // All sub-word and 32-bit types (and references) compare as 32-bit.
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case DataType::Type::kInt64: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case DataType::Type::kFloat32: {
      if (right.IsFpuRegister()) {
        __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        // Compare against the constant through a RIP-relative literal.
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralFloatAddress(
                       right.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(right.IsStackSlot());
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    case DataType::Type::kFloat64: {
      if (right.IsFpuRegister()) {
        __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        // Compare against the constant through a RIP-relative literal.
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralDoubleAddress(
                       right.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(right.IsDoubleStackSlot());
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected condition type " << type;
  }
}
1874
// Emits a comparison plus branches for a long or FP condition. Either target
// may be null, meaning "fall through"; a local label is bound at the end in
// that case so that both branch directions have a concrete destination.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
                                                                  LabelType* true_target_in,
                                                                  LabelType* false_target_in) {
  // Generated branching requires both targets to be explicit. If either of the
  // targets is nullptr (fallthrough) use and bind `fallthrough_target` instead.
  LabelType fallthrough_target;
  LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
  LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;

  // Generate the comparison to set the CC.
  GenerateCompareTest(condition);

  // Now generate the correct jump(s).
  DataType::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    case DataType::Type::kInt64: {
      __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
      break;
    }
    case DataType::Type::kFloat32: {
      // FP jumps need NaN-aware handling.
      GenerateFPJumps(condition, true_target, false_target);
      break;
    }
    case DataType::Type::kFloat64: {
      GenerateFPJumps(condition, true_target, false_target);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected condition type " << type;
  }

  // Emit the unconditional false jump only when the false side is a real target.
  if (false_target != &fallthrough_target) {
    __ jmp(false_target);
  }

  if (fallthrough_target.IsLinked()) {
    __ Bind(&fallthrough_target);
  }
}
1915
David Brazdil0debae72015-11-12 18:37:00 +00001916static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1917 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
1918 // are set only strictly before `branch`. We can't use the eflags on long
1919 // conditions if they are materialized due to the complex branching.
1920 return cond->IsCondition() &&
1921 cond->GetNext() == branch &&
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001922 !DataType::IsFloatingPointType(cond->InputAt(0)->GetType());
David Brazdil0debae72015-11-12 18:37:00 +00001923}
1924
// Emits the branch structure for an instruction whose input at
// `condition_input_index` is a condition: constant conditions become plain
// jumps, materialized conditions are tested against zero (reusing EFLAGS when
// still valid), and non-materialized conditions are compared inline.
// Null targets mean "fall through" to the following code.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
                                                           size_t condition_input_index,
                                                           LabelType* true_target,
                                                           LabelType* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ jmp(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ jmp(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    if (AreEflagsSetFrom(cond, instruction)) {
      // EFLAGS still reflect the condition; branch on them directly.
      if (true_target == nullptr) {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
      } else {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
      }
    } else {
      // Materialized condition, compare against 0.
      Location lhs = instruction->GetLocations()->InAt(condition_input_index);
      if (lhs.IsRegister()) {
        __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
      } else {
        __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
      }
      if (true_target == nullptr) {
        __ j(kEqual, false_target);
      } else {
        __ j(kNotEqual, true_target);
      }
    }
  } else {
    // Condition has not been materialized, use its inputs as the
    // comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    // If this is a long or FP comparison that has been folded into
    // the HCondition, generate the comparison directly.
    DataType::Type type = condition->InputAt(0)->GetType();
    if (type == DataType::Type::kInt64 || DataType::IsFloatingPointType(type)) {
      GenerateCompareTestAndBranch(condition, true_target, false_target);
      return;
    }

    Location lhs = condition->GetLocations()->InAt(0);
    Location rhs = condition->GetLocations()->InAt(1);
    codegen_->GenerateIntCompare(lhs, rhs);
    if (true_target == nullptr) {
      __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
    } else {
      __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ jmp(false_target);
  }
}
2008
// HIf needs an input location only when its condition is a boolean value or
// a materialized condition; a non-materialized condition is emitted inline
// at the branch and consumes no location here.
void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    locations->SetInAt(0, Location::Any());
  }
}
2015
2016void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00002017 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
2018 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
2019 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
2020 nullptr : codegen_->GetLabelOf(true_successor);
2021 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
2022 nullptr : codegen_->GetLabelOf(false_successor);
Andreas Gampe3db70682018-12-26 15:12:03 -08002023 GenerateTestAndBranch(if_instr, /* condition_input_index= */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002024}
2025
// HDeoptimize calls into the runtime on its slow path, so record the first
// runtime-call argument register as caller-saved for that path; the
// condition input follows the same rule as HIf.
void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetAllocator())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  InvokeRuntimeCallingConvention calling_convention;
  RegisterSet caller_saves = RegisterSet::Empty();
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetCustomSlowPathCallerSaves(caller_saves);
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::Any());
  }
}
2037
// Code generation for HDeoptimize: if the condition holds, branch to the
// deoptimization slow path; otherwise fall through (false_target == nullptr).
void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
  GenerateTestAndBranch<Label>(deoptimize,
                               /* condition_input_index= */ 0,
                               slow_path->GetEntryLabel(),
                               /* false_target= */ nullptr);
}
2045
// HShouldDeoptimizeFlag only produces a value, read from a fixed stack slot,
// so it needs just an output register.
void LocationsBuilderX86_64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  LocationSummary* locations = new (GetGraph()->GetAllocator())
      LocationSummary(flag, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}
2051
2052void InstructionCodeGeneratorX86_64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
2053 __ movl(flag->GetLocations()->Out().AsRegister<CpuRegister>(),
2054 Address(CpuRegister(RSP), codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
2055}
2056
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002057static bool SelectCanUseCMOV(HSelect* select) {
2058 // There are no conditional move instructions for XMMs.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002059 if (DataType::IsFloatingPointType(select->GetType())) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002060 return false;
2061 }
2062
2063 // A FP condition doesn't generate the single CC that we need.
2064 HInstruction* condition = select->GetCondition();
2065 if (condition->IsCondition() &&
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002066 DataType::IsFloatingPointType(condition->InputAt(0)->GetType())) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002067 return false;
2068 }
2069
2070 // We can generate a CMOV for this Select.
2071 return true;
2072}
2073
// Location constraints for HSelect. The false value (input 0) shares the
// output register; the true value is constrained to a register when the
// CMOV lowering would otherwise see a constant (CMOV has no immediate form).
void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
  if (DataType::IsFloatingPointType(select->GetType())) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1, Location::Any());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
    if (SelectCanUseCMOV(select)) {
      if (select->InputAt(1)->IsConstant()) {
        // CMOV can't take an immediate; force the constant into a register.
        locations->SetInAt(1, Location::RequiresRegister());
      } else {
        locations->SetInAt(1, Location::Any());
      }
    } else {
      locations->SetInAt(1, Location::Any());
    }
  }
  // Materialized/boolean conditions are read from a register (input 2).
  if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
    locations->SetInAt(2, Location::RequiresRegister());
  }
  locations->SetOut(Location::SameAsFirstInput());
}
2096
// Code generation for HSelect: either a branch-free CMOV (integer result and
// integer condition) or a test-and-branch over a MoveLocation.
void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  if (SelectCanUseCMOV(select)) {
    // If both the condition and the source types are integer, we can generate
    // a CMOV to implement Select.
    CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
    Location value_true_loc = locations->InAt(1);
    DCHECK(locations->InAt(0).Equals(locations->Out()));

    HInstruction* select_condition = select->GetCondition();
    Condition cond = kNotEqual;

    // Figure out how to test the 'condition'.
    if (select_condition->IsCondition()) {
      HCondition* condition = select_condition->AsCondition();
      if (!condition->IsEmittedAtUseSite()) {
        // This was a previously materialized condition.
        // Can we use the existing condition code?
        if (AreEflagsSetFrom(condition, select)) {
          // Materialization was the previous instruction.  Condition codes are right.
          cond = X86_64IntegerCondition(condition->GetCondition());
        } else {
          // No, we have to recreate the condition code.
          CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
          __ testl(cond_reg, cond_reg);
        }
      } else {
        // Condition is emitted here: compute the flags now.
        GenerateCompareTest(condition);
        cond = X86_64IntegerCondition(condition->GetCondition());
      }
    } else {
      // Must be a Boolean condition, which needs to be compared to 0.
      CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
      __ testl(cond_reg, cond_reg);
    }

    // If the condition is true, overwrite the output, which already contains false.
    // Generate the correct sized CMOV.
    bool is_64_bit = DataType::Is64BitType(select->GetType());
    if (value_true_loc.IsRegister()) {
      __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
    } else {
      __ cmov(cond,
              value_false,
              Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
    }
  } else {
    // Fallback: branch around the move of the true value; the output already
    // holds the false value (SameAsFirstInput).
    NearLabel false_target;
    GenerateTestAndBranch<NearLabel>(select,
                                     /* condition_input_index= */ 2,
                                     /* true_target= */ nullptr,
                                     &false_target);
    codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
    __ Bind(&false_target);
  }
}
2153
// HNativeDebugInfo needs an (empty) LocationSummary but no input/output
// locations.
void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  new (GetGraph()->GetAllocator()) LocationSummary(info);
}
2157
void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}
2161
// Grows the stack frame by `adjustment` bytes (lowering RSP) and keeps the
// DWARF CFA offset in sync.
void CodeGeneratorX86_64::IncreaseFrame(size_t adjustment) {
  __ subq(CpuRegister(RSP), Immediate(adjustment));
  __ cfi().AdjustCFAOffset(adjustment);
}
2166
2167void CodeGeneratorX86_64::DecreaseFrame(size_t adjustment) {
2168 __ addq(CpuRegister(RSP), Immediate(adjustment));
2169 __ cfi().AdjustCFAOffset(-adjustment);
2170}
2171
// Emits a single one-byte nop (used e.g. as a padding/debug-info anchor).
void CodeGeneratorX86_64::GenerateNop() {
  __ nop();
}
2175
// Location constraints shared by all HCondition visitors: the left operand
// must be in a (FP) register, the right operand can be anywhere, and an
// output register is needed only if the condition is materialized.
void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(cond, LocationSummary::kNoCall);
  // Handle the long/FP comparisons made in instruction simplification.
  switch (cond->InputAt(0)->GetType()) {
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      break;
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      break;
    default:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      break;
  }
  // No output needed when the condition is consumed directly at its use.
  if (!cond->IsEmittedAtUseSite()) {
    locations->SetOut(Location::RequiresRegister());
  }
}
2199
// Materializes a boolean (0/1) condition result into its output register.
// Integer/long conditions compile to a compare plus setcc; floating-point
// conditions compile to ucomiss/ucomisd followed by branches that are then
// folded into 0/1 at the end.
void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
  // Nothing to do if the condition is consumed directly by its user
  // (e.g. an HIf); the user emits the compare itself.
  if (cond->IsEmittedAtUseSite()) {
    return;
  }

  LocationSummary* locations = cond->GetLocations();
  Location lhs = locations->InAt(0);
  Location rhs = locations->InAt(1);
  CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
  NearLabel true_label, false_label;

  switch (cond->InputAt(0)->GetType()) {
    default:
      // Integer case.

      // Clear output register: setcc only sets the low byte.
      // (xorl must precede the compare: it clobbers EFLAGS.)
      __ xorl(reg, reg);

      codegen_->GenerateIntCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case DataType::Type::kInt64:
      // Clear output register: setcc only sets the low byte.
      __ xorl(reg, reg);

      codegen_->GenerateLongCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case DataType::Type::kFloat32: {
      // rhs can be a constant (via the literal pool), a stack slot, or an
      // XMM register; ucomiss accepts all three addressing forms.
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
      } else if (rhs.IsStackSlot()) {
        __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      // FP comparisons must handle NaN (unordered); GenerateFPJumps branches
      // to true_label/false_label accordingly.
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
    case DataType::Type::kFloat64: {
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
      } else if (rhs.IsDoubleStackSlot()) {
        __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
  }

  // Convert the jumps into the result.
  NearLabel done_label;

  // False case: result = 0.
  __ Bind(&false_label);
  __ xorl(reg, reg);
  __ jmp(&done_label);

  // True case: result = 1.
  __ Bind(&true_label);
  __ movl(reg, Immediate(1));
  __ Bind(&done_label);
}
2269
2270void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002271 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002272}
2273
2274void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002275 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002276}
2277
2278void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002279 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002280}
2281
2282void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002283 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002284}
2285
2286void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002287 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002288}
2289
2290void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002291 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002292}
2293
2294void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002295 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002296}
2297
2298void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002299 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002300}
2301
2302void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002303 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002304}
2305
2306void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002307 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002308}
2309
2310void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002311 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002312}
2313
2314void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002315 HandleCondition(comp);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002316}
2317
Aart Bike9f37602015-10-09 11:15:55 -07002318void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002319 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002320}
2321
2322void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002323 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002324}
2325
2326void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002327 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002328}
2329
2330void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002331 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002332}
2333
2334void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002335 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002336}
2337
2338void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002339 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002340}
2341
2342void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002343 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002344}
2345
2346void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002347 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002348}
2349
// Location setup for HCompare (the three-way -1/0/1 comparison).
// Integral inputs need a GP register for the first operand; FP inputs need an
// XMM register. The second operand may be anything (register, stack slot,
// constant). The result always lands in a GP register.
void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(compare, LocationSummary::kNoCall);
  switch (compare->InputAt(0)->GetType()) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      // The output may share a register with the inputs.
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
  }
}
2377
// Emits the three-way comparison: out = -1 if left < right, 0 if equal,
// 1 if greater. For FP inputs, NaN (unordered result) is resolved according
// to the compare's gt-bias flag.
void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  NearLabel less, greater, done;
  DataType::Type type = compare->InputAt(0)->GetType();
  // Condition used to detect the "less" outcome after the compare; signed
  // integer compares use kLess, FP compares use kBelow (see below).
  Condition less_cond = kLess;

  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case DataType::Type::kInt64: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case DataType::Type::kFloat32: {
      // right can be a literal-pool constant, a stack slot, or an XMM register.
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        float value = right.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
      } else if (right.IsStackSlot()) {
        __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      // NaN: gt-bias compares treat unordered as "greater", lt-bias as "less".
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  //  ucomis{s,d} sets CF
      break;
    }
    case DataType::Type::kFloat64: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        double value = right.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
      } else if (right.IsDoubleStackSlot()) {
        __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  //  ucomis{s,d} sets CF
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }

  // movl does not affect EFLAGS, so the flags from the compare above are
  // still live for the two conditional jumps that follow.
  __ movl(out, Immediate(0));
  __ j(kEqual, &done);
  __ j(less_cond, &less);

  // Fall-through (not equal, not less): result = 1.
  __ Bind(&greater);
  __ movl(out, Immediate(1));
  __ jmp(&done);

  __ Bind(&less);
  __ movl(out, Immediate(-1));

  __ Bind(&done);
}
2447
// Constants get a ConstantLocation: no register is allocated, and the value
// is materialized directly at each use site.
void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2453
// No code emitted here: the constant is materialized at each use site.
void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2457
// Null references are constants too: materialized at their use sites.
void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2463
// No code emitted here: the null constant is materialized at each use site.
void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2467
// 64-bit constants: same treatment as int constants (use-site materialization).
void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2473
// No code emitted here: the constant is materialized at each use site.
void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2477
// Float constants: use-site materialization (typically via the literal pool).
void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2483
// No code emitted here: the constant is materialized at each use site.
void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2487
// Double constants: use-site materialization (typically via the literal pool).
void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2493
// No code emitted here: the constant is materialized at each use site.
void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2498
// Constructor fences need no operand locations.
void LocationsBuilderX86_64::VisitConstructorFence(HConstructorFence* constructor_fence) {
  constructor_fence->SetLocations(nullptr);
}
2502
// A constructor fence lowers to a store-store barrier, ordering the
// constructor's field writes before the publication of the reference.
void InstructionCodeGeneratorX86_64::VisitConstructorFence(
    HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
  codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}
2507
// Memory barriers need no operand locations.
void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}
2511
// Emits the barrier with whichever kind the HIR instruction carries.
void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
2515
// A void return has no operands, so no LocationSummary is needed.
void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}
2519
// Nothing to place in a return register; just tear down the frame and return.
void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}
2523
// Pins the returned value to the ABI return register: RAX for reference and
// integral types, XMM0 for floating-point types.
void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(ret, LocationSummary::kNoCall);
  switch (ret->InputAt(0)->GetType()) {
    case DataType::Type::kReference:
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
      break;

    default:
      LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
  }
}
2548
// Emits the method epilogue. The register allocator has already placed the
// return value in RAX/XMM0 (checked below); this only verifies the location,
// handles the OSR special case, and tears down the frame.
void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
  switch (ret->InputAt(0)->GetType()) {
    case DataType::Type::kReference:
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
      break;

    case DataType::Type::kFloat32: {
      DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
                XMM0);
      // To simplify callers of an OSR method, we put the return value in both
      // floating point and core register.
      if (GetGraph()->IsCompilingOsr()) {
        __ movd(CpuRegister(RAX), XmmRegister(XMM0), /* is64bit= */ false);
      }
      break;
    }
    case DataType::Type::kFloat64: {
      DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
                XMM0);
      // To simplify callers of an OSR method, we put the return value in both
      // floating point and core register.
      if (GetGraph()->IsCompilingOsr()) {
        __ movd(CpuRegister(RAX), XmmRegister(XMM0), /* is64bit= */ true);
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
  }
  codegen_->GenerateFrameExit();
}
2588
// Managed (dex) calling convention: return values live in RAX for reference
// and integral types, XMM0 for floating-point types; void has no location.
Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(DataType::Type type) const {
  switch (type) {
    case DataType::Type::kReference:
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kUint32:
    case DataType::Type::kInt32:
    case DataType::Type::kUint64:
    case DataType::Type::kInt64:
      return Location::RegisterLocation(RAX);

    case DataType::Type::kVoid:
      return Location::NoLocation();

    case DataType::Type::kFloat64:
    case DataType::Type::kFloat32:
      return Location::FpuRegisterLocation(XMM0);
  }

  // The switch above is exhaustive over the enum.
  UNREACHABLE();
}
2613
// The callee's ArtMethod* is passed in the dedicated method register.
Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}
2617
// Assigns the location of the next managed-ABI argument of the given type.
// GP and FP arguments are counted independently (gp_index_ / float_index_),
// while stack_index_ advances for *every* argument (two slots for 64-bit
// types) so that arguments overflowing the registers land at the correct
// caller-frame offset.
Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(DataType::Type type) {
  switch (type) {
    case DataType::Type::kReference:
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      uint32_t index = gp_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        // stack_index_ was already advanced, hence the "- 1".
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case DataType::Type::kInt64: {
      uint32_t index = gp_index_;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfRegisters()) {
        // A 64-bit value still consumes only one x86-64 GP register.
        gp_index_ += 1;
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        // NOTE(review): gp_index_ is bumped by 2 on the stack path,
        // presumably to mirror the dex vreg pair accounting -- confirm.
        gp_index_ += 2;
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case DataType::Type::kFloat32: {
      uint32_t index = float_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case DataType::Type::kFloat64: {
      uint32_t index = float_index_++;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      UNREACHABLE();
  }
  return Location::NoLocation();
}
2676
// Assigns the location of the next argument under the native (System V
// x86-64) ABI, as used for @CriticalNative calls. References are not allowed
// in this convention. Arguments that do not fit in registers go on the stack;
// during register allocation those are relaxed to Location::Any().
Location CriticalNativeCallingConventionVisitorX86_64::GetNextLocation(DataType::Type type) {
  DCHECK_NE(type, DataType::Type::kReference);

  Location location = Location::NoLocation();
  if (DataType::IsFloatingPointType(type)) {
    if (fpr_index_ < kParameterFloatRegistersLength) {
      location = Location::FpuRegisterLocation(kParameterFloatRegisters[fpr_index_]);
      ++fpr_index_;
    }
  } else {
    // Native ABI uses the same registers as managed, except that the method register RDI
    // is a normal argument.
    if (gpr_index_ < 1u + kParameterCoreRegistersLength) {
      location = Location::RegisterLocation(
          gpr_index_ == 0u ? RDI : kParameterCoreRegisters[gpr_index_ - 1u]);
      ++gpr_index_;
    }
  }
  if (location.IsInvalid()) {
    // Out of registers: spill to the outgoing stack area. Each slot is one
    // frame-pointer-sized unit regardless of the argument's width.
    if (DataType::Is64BitType(type)) {
      location = Location::DoubleStackSlot(stack_offset_);
    } else {
      location = Location::StackSlot(stack_offset_);
    }
    stack_offset_ += kFramePointerSize;

    if (for_register_allocation_) {
      // During register allocation the stack slot is not pinned yet.
      location = Location::Any();
    }
  }
  return location;
}
2709
// Native return registers match the managed ones for supported types, so the
// managed convention's answer is reused.
Location CriticalNativeCallingConventionVisitorX86_64::GetReturnLocation(DataType::Type type)
    const {
  // We perform conversion to the managed ABI return register after the call if needed.
  InvokeDexCallingConventionVisitorX86_64 dex_calling_convention;
  return dex_calling_convention.GetReturnLocation(type);
}
2716
// RDI carries the first native argument, so the ArtMethod* is passed as a
// hidden argument in RAX instead.
Location CriticalNativeCallingConventionVisitorX86_64::GetMethodLocation() const {
  // Pass the method in the hidden argument RAX.
  return Location::RegisterLocation(RAX);
}
2721
// Unresolved invokes go through a runtime trampoline; locations follow the
// regular managed convention.
void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}
2728
// Emits the runtime call that resolves and invokes the target method.
void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
2732
// Location setup for static/direct invokes. Tries the intrinsic path first;
// otherwise uses either the @CriticalNative (native ABI) convention or the
// regular managed convention.
void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    // The intrinsic builder has already set up the locations.
    return;
  }

  if (invoke->GetCodePtrLocation() == CodePtrLocation::kCallCriticalNative) {
    CriticalNativeCallingConventionVisitorX86_64 calling_convention_visitor(
        /*for_register_allocation=*/ true);
    CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
    // Native callees do not preserve XMM registers, so block the non-volatile
    // ones around the call.
    CodeGeneratorX86_64::BlockNonVolatileXmmRegisters(invoke->GetLocations());
  } else {
    HandleInvoke(invoke);
  }
}
2752
// If the invoke was given an intrinsified LocationSummary, emit the intrinsic
// expansion instead of a call. Returns true iff intrinsic code was generated.
static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
  if (invoke->GetLocations()->Intrinsified()) {
    IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
    intrinsic.Dispatch(invoke);
    return true;
  }
  return false;
}
2761
// Code generation for static/direct invokes: intrinsic expansion if
// available, otherwise a direct call.
void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  LocationSummary* locations = invoke->GetLocations();
  // The temp (when present) is used by GenerateStaticOrDirectCall for the
  // method pointer.
  codegen_->GenerateStaticOrDirectCall(
      invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
}
2775
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002776void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002777 InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002778 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002779}
2780
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002781void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Mark Mendellfb8d2792015-03-31 22:16:59 -04002782 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002783 if (intrinsic.TryDispatch(invoke)) {
2784 return;
2785 }
2786
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002787 HandleInvoke(invoke);
2788}
2789
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002790void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002791 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2792 return;
2793 }
2794
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002795 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002796 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002797}
2798
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002799void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2800 HandleInvoke(invoke);
2801 // Add the hidden argument.
Nicolas Geoffray8d34a182020-09-16 09:46:58 +01002802 if (invoke->GetHiddenArgumentLoadKind() == MethodLoadKind::kRecursive) {
2803 invoke->GetLocations()->SetInAt(invoke->GetNumberOfArguments() - 1,
2804 Location::RegisterLocation(RAX));
2805 }
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002806 invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
2807}
2808
Nicolas Geoffray20036d82019-11-28 16:15:00 +00002809void CodeGeneratorX86_64::MaybeGenerateInlineCacheCheck(HInstruction* instruction,
2810 CpuRegister klass) {
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +00002811 DCHECK_EQ(RDI, klass.AsRegister());
Nicolas Geoffray20036d82019-11-28 16:15:00 +00002812 // We know the destination of an intrinsic, so no need to record inline
2813 // caches.
2814 if (!instruction->GetLocations()->Intrinsified() &&
Nicolas Geoffray9b5271e2019-12-04 14:39:46 +00002815 GetGraph()->IsCompilingBaseline() &&
Nicolas Geoffray20036d82019-11-28 16:15:00 +00002816 !Runtime::Current()->IsAotCompiler()) {
Nicolas Geoffray095dc462020-08-17 16:40:28 +01002817 ScopedProfilingInfoUse spiu(
2818 Runtime::Current()->GetJit(), GetGraph()->GetArtMethod(), Thread::Current());
2819 ProfilingInfo* info = spiu.GetProfilingInfo();
Nicolas Geoffray796aa2c2019-12-17 10:20:05 +00002820 if (info != nullptr) {
2821 InlineCache* cache = info->GetInlineCache(instruction->GetDexPc());
2822 uint64_t address = reinterpret_cast64<uint64_t>(cache);
2823 NearLabel done;
2824 __ movq(CpuRegister(TMP), Immediate(address));
2825 // Fast path for a monomorphic cache.
2826 __ cmpl(Address(CpuRegister(TMP), InlineCache::ClassesOffset().Int32Value()), klass);
2827 __ j(kEqual, &done);
2828 GenerateInvokeRuntime(
2829 GetThreadOffset<kX86_64PointerSize>(kQuickUpdateInlineCache).Int32Value());
2830 __ Bind(&done);
2831 }
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +00002832 }
2833}
2834
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002835void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2836 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain0d5a2812015-11-13 10:07:31 +00002837 LocationSummary* locations = invoke->GetLocations();
2838 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002839 Location receiver = locations->InAt(0);
2840 size_t class_offset = mirror::Object::ClassOffset().SizeValue();
2841
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002842 if (receiver.IsStackSlot()) {
2843 __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00002844 // /* HeapReference<Class> */ temp = temp->klass_
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002845 __ movl(temp, Address(temp, class_offset));
2846 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00002847 // /* HeapReference<Class> */ temp = receiver->klass_
Roland Levillain271ab9c2014-11-27 15:23:57 +00002848 __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002849 }
Calin Juravle77520bc2015-01-12 18:45:46 +00002850 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +00002851 // Instead of simply (possibly) unpoisoning `temp` here, we should
2852 // emit a read barrier for the previous class reference load.
2853 // However this is not required in practice, as this is an
2854 // intermediate/temporary reference and because the current
2855 // concurrent copying collector keeps the from-space memory
2856 // intact/accessible until the end of the marking phase (the
2857 // concurrent copying collector may not in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01002858 __ MaybeUnpoisonHeapReference(temp);
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +00002859
Nicolas Geoffray20036d82019-11-28 16:15:00 +00002860 codegen_->MaybeGenerateInlineCacheCheck(invoke, temp);
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +00002861
Nicolas Geoffrayd6bd1072020-11-30 18:42:01 +00002862 if (invoke->GetHiddenArgumentLoadKind() != MethodLoadKind::kRecursive &&
2863 invoke->GetHiddenArgumentLoadKind() != MethodLoadKind::kRuntimeCall) {
Nicolas Geoffray8d34a182020-09-16 09:46:58 +01002864 Location hidden_reg = locations->GetTemp(1);
2865 // Set the hidden argument. This is safe to do this here, as RAX
2866 // won't be modified thereafter, before the `call` instruction.
Nicolas Geoffrayd6bd1072020-11-30 18:42:01 +00002867 // We also do it after MaybeGenerateInlineCache that may use RAX.
Nicolas Geoffray8d34a182020-09-16 09:46:58 +01002868 DCHECK_EQ(RAX, hidden_reg.AsRegister<Register>());
2869 codegen_->LoadMethod(invoke->GetHiddenArgumentLoadKind(), hidden_reg, invoke);
2870 }
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +00002871
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00002872 // temp = temp->GetAddressOfIMT()
2873 __ movq(temp,
2874 Address(temp, mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
2875 // temp = temp->GetImtEntryAt(method_offset);
2876 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00002877 invoke->GetImtIndex(), kX86_64PointerSize));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002878 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07002879 __ movq(temp, Address(temp, method_offset));
Nicolas Geoffrayd6bd1072020-11-30 18:42:01 +00002880 if (invoke->GetHiddenArgumentLoadKind() == MethodLoadKind::kRuntimeCall) {
2881 // We pass the method from the IMT in case of a conflict. This will ensure
2882 // we go into the runtime to resolve the actual method.
2883 Location hidden_reg = locations->GetTemp(1);
2884 __ movq(hidden_reg.AsRegister<CpuRegister>(), temp);
2885 }
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002886 // call temp->GetEntryPoint();
Andreas Gampe542451c2016-07-26 09:02:02 -07002887 __ call(Address(
2888 temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize).SizeValue()));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002889
2890 DCHECK(!codegen_->IsLeafMethod());
2891 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2892}
2893
Orion Hodsonac141392017-01-13 11:53:47 +00002894void LocationsBuilderX86_64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
Andra Danciua0130e82020-07-23 12:34:56 +00002895 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
2896 if (intrinsic.TryDispatch(invoke)) {
2897 return;
2898 }
Orion Hodsonac141392017-01-13 11:53:47 +00002899 HandleInvoke(invoke);
2900}
2901
2902void InstructionCodeGeneratorX86_64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
Andra Danciua0130e82020-07-23 12:34:56 +00002903 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2904 return;
2905 }
Orion Hodsonac141392017-01-13 11:53:47 +00002906 codegen_->GenerateInvokePolymorphicCall(invoke);
2907}
2908
Orion Hodson4c8e12e2018-05-18 08:33:20 +01002909void LocationsBuilderX86_64::VisitInvokeCustom(HInvokeCustom* invoke) {
2910 HandleInvoke(invoke);
2911}
2912
2913void InstructionCodeGeneratorX86_64::VisitInvokeCustom(HInvokeCustom* invoke) {
2914 codegen_->GenerateInvokeCustomCall(invoke);
2915}
2916
Roland Levillain88cb1752014-10-20 16:36:47 +01002917void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
2918 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002919 new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
Roland Levillain88cb1752014-10-20 16:36:47 +01002920 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002921 case DataType::Type::kInt32:
2922 case DataType::Type::kInt64:
Roland Levillain88cb1752014-10-20 16:36:47 +01002923 locations->SetInAt(0, Location::RequiresRegister());
2924 locations->SetOut(Location::SameAsFirstInput());
2925 break;
2926
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002927 case DataType::Type::kFloat32:
2928 case DataType::Type::kFloat64:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002929 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002930 locations->SetOut(Location::SameAsFirstInput());
Roland Levillain5368c212014-11-27 15:03:41 +00002931 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002932 break;
2933
2934 default:
2935 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2936 }
2937}
2938
2939void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
2940 LocationSummary* locations = neg->GetLocations();
2941 Location out = locations->Out();
2942 Location in = locations->InAt(0);
2943 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002944 case DataType::Type::kInt32:
Roland Levillain88cb1752014-10-20 16:36:47 +01002945 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002946 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002947 __ negl(out.AsRegister<CpuRegister>());
Roland Levillain88cb1752014-10-20 16:36:47 +01002948 break;
2949
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002950 case DataType::Type::kInt64:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002951 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002952 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002953 __ negq(out.AsRegister<CpuRegister>());
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002954 break;
2955
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002956 case DataType::Type::kFloat32: {
Roland Levillain5368c212014-11-27 15:03:41 +00002957 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002958 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002959 // Implement float negation with an exclusive or with value
2960 // 0x80000000 (mask for bit 31, representing the sign of a
2961 // single-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002962 __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002963 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain3dbcb382014-10-28 17:30:07 +00002964 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002965 }
Roland Levillain3dbcb382014-10-28 17:30:07 +00002966
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002967 case DataType::Type::kFloat64: {
Roland Levillain5368c212014-11-27 15:03:41 +00002968 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002969 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002970 // Implement double negation with an exclusive or with value
Roland Levillain3dbcb382014-10-28 17:30:07 +00002971 // 0x8000000000000000 (mask for bit 63, representing the sign of
Roland Levillain5368c212014-11-27 15:03:41 +00002972 // a double-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002973 __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002974 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain88cb1752014-10-20 16:36:47 +01002975 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002976 }
Roland Levillain88cb1752014-10-20 16:36:47 +01002977
2978 default:
2979 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2980 }
2981}
2982
Roland Levillaindff1f282014-11-05 14:15:05 +00002983void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2984 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002985 new (GetGraph()->GetAllocator()) LocationSummary(conversion, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002986 DataType::Type result_type = conversion->GetResultType();
2987 DataType::Type input_type = conversion->GetInputType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002988 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
2989 << input_type << " -> " << result_type;
David Brazdil46e2a392015-03-16 17:31:52 +00002990
Roland Levillaindff1f282014-11-05 14:15:05 +00002991 switch (result_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002992 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002993 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002994 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002995 case DataType::Type::kInt16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002996 DCHECK(DataType::IsIntegralType(input_type)) << input_type;
2997 locations->SetInAt(0, Location::Any());
2998 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Roland Levillain01a8d712014-11-14 16:27:39 +00002999 break;
3000
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003001 case DataType::Type::kInt32:
Roland Levillain946e1432014-11-11 17:35:19 +00003002 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003003 case DataType::Type::kInt64:
Roland Levillain946e1432014-11-11 17:35:19 +00003004 locations->SetInAt(0, Location::Any());
3005 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3006 break;
3007
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003008 case DataType::Type::kFloat32:
Roland Levillain3f8f9362014-12-02 17:45:01 +00003009 locations->SetInAt(0, Location::RequiresFpuRegister());
3010 locations->SetOut(Location::RequiresRegister());
Roland Levillain3f8f9362014-12-02 17:45:01 +00003011 break;
3012
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003013 case DataType::Type::kFloat64:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003014 locations->SetInAt(0, Location::RequiresFpuRegister());
3015 locations->SetOut(Location::RequiresRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00003016 break;
3017
3018 default:
3019 LOG(FATAL) << "Unexpected type conversion from " << input_type
3020 << " to " << result_type;
3021 }
3022 break;
3023
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003024 case DataType::Type::kInt64:
Roland Levillaindff1f282014-11-05 14:15:05 +00003025 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003026 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003027 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003028 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003029 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003030 case DataType::Type::kInt16:
3031 case DataType::Type::kInt32:
Roland Levillaindff1f282014-11-05 14:15:05 +00003032 // TODO: We would benefit from a (to-be-implemented)
3033 // Location::RegisterOrStackSlot requirement for this input.
3034 locations->SetInAt(0, Location::RequiresRegister());
3035 locations->SetOut(Location::RequiresRegister());
3036 break;
3037
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003038 case DataType::Type::kFloat32:
Roland Levillain624279f2014-12-04 11:54:28 +00003039 locations->SetInAt(0, Location::RequiresFpuRegister());
3040 locations->SetOut(Location::RequiresRegister());
Roland Levillain624279f2014-12-04 11:54:28 +00003041 break;
3042
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003043 case DataType::Type::kFloat64:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003044 locations->SetInAt(0, Location::RequiresFpuRegister());
3045 locations->SetOut(Location::RequiresRegister());
Roland Levillaindff1f282014-11-05 14:15:05 +00003046 break;
3047
3048 default:
3049 LOG(FATAL) << "Unexpected type conversion from " << input_type
3050 << " to " << result_type;
3051 }
3052 break;
3053
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003054 case DataType::Type::kFloat32:
Roland Levillaincff13742014-11-17 14:32:17 +00003055 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003056 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003057 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003058 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003059 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003060 case DataType::Type::kInt16:
3061 case DataType::Type::kInt32:
Mark Mendell40741f32015-04-20 22:10:34 -04003062 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00003063 locations->SetOut(Location::RequiresFpuRegister());
3064 break;
3065
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003066 case DataType::Type::kInt64:
Mark Mendell40741f32015-04-20 22:10:34 -04003067 locations->SetInAt(0, Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00003068 locations->SetOut(Location::RequiresFpuRegister());
3069 break;
3070
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003071 case DataType::Type::kFloat64:
Mark Mendell40741f32015-04-20 22:10:34 -04003072 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00003073 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00003074 break;
3075
3076 default:
3077 LOG(FATAL) << "Unexpected type conversion from " << input_type
3078 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08003079 }
Roland Levillaincff13742014-11-17 14:32:17 +00003080 break;
3081
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003082 case DataType::Type::kFloat64:
Roland Levillaincff13742014-11-17 14:32:17 +00003083 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003084 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003085 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003086 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003087 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003088 case DataType::Type::kInt16:
3089 case DataType::Type::kInt32:
Mark Mendell40741f32015-04-20 22:10:34 -04003090 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00003091 locations->SetOut(Location::RequiresFpuRegister());
3092 break;
3093
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003094 case DataType::Type::kInt64:
Mark Mendell40741f32015-04-20 22:10:34 -04003095 locations->SetInAt(0, Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00003096 locations->SetOut(Location::RequiresFpuRegister());
3097 break;
3098
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003099 case DataType::Type::kFloat32:
Mark Mendell40741f32015-04-20 22:10:34 -04003100 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00003101 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00003102 break;
3103
3104 default:
3105 LOG(FATAL) << "Unexpected type conversion from " << input_type
3106 << " to " << result_type;
3107 }
Roland Levillaindff1f282014-11-05 14:15:05 +00003108 break;
3109
3110 default:
3111 LOG(FATAL) << "Unexpected type conversion from " << input_type
3112 << " to " << result_type;
3113 }
3114}
3115
3116void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
3117 LocationSummary* locations = conversion->GetLocations();
3118 Location out = locations->Out();
3119 Location in = locations->InAt(0);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003120 DataType::Type result_type = conversion->GetResultType();
3121 DataType::Type input_type = conversion->GetInputType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003122 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
3123 << input_type << " -> " << result_type;
Roland Levillaindff1f282014-11-05 14:15:05 +00003124 switch (result_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003125 case DataType::Type::kUint8:
Roland Levillain51d3fc42014-11-13 14:11:42 +00003126 switch (input_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003127 case DataType::Type::kInt8:
3128 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003129 case DataType::Type::kInt16:
3130 case DataType::Type::kInt32:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003131 case DataType::Type::kInt64:
3132 if (in.IsRegister()) {
3133 __ movzxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
3134 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
3135 __ movzxb(out.AsRegister<CpuRegister>(),
3136 Address(CpuRegister(RSP), in.GetStackIndex()));
3137 } else {
3138 __ movl(out.AsRegister<CpuRegister>(),
3139 Immediate(static_cast<uint8_t>(Int64FromConstant(in.GetConstant()))));
3140 }
3141 break;
3142
3143 default:
3144 LOG(FATAL) << "Unexpected type conversion from " << input_type
3145 << " to " << result_type;
3146 }
3147 break;
3148
3149 case DataType::Type::kInt8:
3150 switch (input_type) {
3151 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003152 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003153 case DataType::Type::kInt16:
3154 case DataType::Type::kInt32:
3155 case DataType::Type::kInt64:
Roland Levillain51d3fc42014-11-13 14:11:42 +00003156 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003157 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00003158 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003159 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00003160 Address(CpuRegister(RSP), in.GetStackIndex()));
3161 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003162 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00003163 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain51d3fc42014-11-13 14:11:42 +00003164 }
3165 break;
3166
3167 default:
3168 LOG(FATAL) << "Unexpected type conversion from " << input_type
3169 << " to " << result_type;
3170 }
3171 break;
3172
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003173 case DataType::Type::kUint16:
3174 switch (input_type) {
3175 case DataType::Type::kInt8:
3176 case DataType::Type::kInt16:
3177 case DataType::Type::kInt32:
3178 case DataType::Type::kInt64:
3179 if (in.IsRegister()) {
3180 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
3181 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
3182 __ movzxw(out.AsRegister<CpuRegister>(),
3183 Address(CpuRegister(RSP), in.GetStackIndex()));
3184 } else {
3185 __ movl(out.AsRegister<CpuRegister>(),
3186 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
3187 }
3188 break;
3189
3190 default:
3191 LOG(FATAL) << "Unexpected type conversion from " << input_type
3192 << " to " << result_type;
3193 }
3194 break;
3195
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003196 case DataType::Type::kInt16:
Roland Levillain01a8d712014-11-14 16:27:39 +00003197 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003198 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003199 case DataType::Type::kInt32:
3200 case DataType::Type::kInt64:
Roland Levillain01a8d712014-11-14 16:27:39 +00003201 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003202 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00003203 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003204 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00003205 Address(CpuRegister(RSP), in.GetStackIndex()));
3206 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003207 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00003208 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain01a8d712014-11-14 16:27:39 +00003209 }
3210 break;
3211
3212 default:
3213 LOG(FATAL) << "Unexpected type conversion from " << input_type
3214 << " to " << result_type;
3215 }
3216 break;
3217
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003218 case DataType::Type::kInt32:
Roland Levillain946e1432014-11-11 17:35:19 +00003219 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003220 case DataType::Type::kInt64:
Roland Levillain946e1432014-11-11 17:35:19 +00003221 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003222 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00003223 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003224 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00003225 Address(CpuRegister(RSP), in.GetStackIndex()));
3226 } else {
3227 DCHECK(in.IsConstant());
3228 DCHECK(in.GetConstant()->IsLongConstant());
3229 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003230 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00003231 }
3232 break;
3233
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003234 case DataType::Type::kFloat32: {
Roland Levillain3f8f9362014-12-02 17:45:01 +00003235 XmmRegister input = in.AsFpuRegister<XmmRegister>();
3236 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04003237 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00003238
3239 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04003240 // if input >= (float)INT_MAX goto done
Nick Desaulniers98e97c62019-10-18 14:25:19 -07003241 __ comiss(input, codegen_->LiteralFloatAddress(static_cast<float>(kPrimIntMax)));
Roland Levillain3f8f9362014-12-02 17:45:01 +00003242 __ j(kAboveEqual, &done);
3243 // if input == NaN goto nan
3244 __ j(kUnordered, &nan);
3245 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00003246 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00003247 __ jmp(&done);
3248 __ Bind(&nan);
3249 // output = 0
3250 __ xorl(output, output);
3251 __ Bind(&done);
3252 break;
3253 }
3254
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003255 case DataType::Type::kFloat64: {
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003256 XmmRegister input = in.AsFpuRegister<XmmRegister>();
3257 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04003258 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003259
3260 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04003261 // if input >= (double)INT_MAX goto done
3262 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003263 __ j(kAboveEqual, &done);
3264 // if input == NaN goto nan
3265 __ j(kUnordered, &nan);
3266 // output = double-to-int-truncate(input)
3267 __ cvttsd2si(output, input);
3268 __ jmp(&done);
3269 __ Bind(&nan);
3270 // output = 0
3271 __ xorl(output, output);
3272 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00003273 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003274 }
Roland Levillain946e1432014-11-11 17:35:19 +00003275
3276 default:
3277 LOG(FATAL) << "Unexpected type conversion from " << input_type
3278 << " to " << result_type;
3279 }
3280 break;
3281
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003282 case DataType::Type::kInt64:
Roland Levillaindff1f282014-11-05 14:15:05 +00003283 switch (input_type) {
3284 DCHECK(out.IsRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003285 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003286 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003287 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003288 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003289 case DataType::Type::kInt16:
3290 case DataType::Type::kInt32:
Roland Levillaindff1f282014-11-05 14:15:05 +00003291 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00003292 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00003293 break;
3294
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003295 case DataType::Type::kFloat32: {
Roland Levillain624279f2014-12-04 11:54:28 +00003296 XmmRegister input = in.AsFpuRegister<XmmRegister>();
3297 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04003298 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00003299
Mark Mendell92e83bf2015-05-07 11:25:03 -04003300 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04003301 // if input >= (float)LONG_MAX goto done
Nick Desaulniers98e97c62019-10-18 14:25:19 -07003302 __ comiss(input, codegen_->LiteralFloatAddress(static_cast<float>(kPrimLongMax)));
Roland Levillain624279f2014-12-04 11:54:28 +00003303 __ j(kAboveEqual, &done);
3304 // if input == NaN goto nan
3305 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003306 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00003307 __ cvttss2si(output, input, true);
3308 __ jmp(&done);
3309 __ Bind(&nan);
3310 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04003311 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00003312 __ Bind(&done);
3313 break;
3314 }
3315
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003316 case DataType::Type::kFloat64: {
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003317 XmmRegister input = in.AsFpuRegister<XmmRegister>();
3318 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04003319 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003320
Mark Mendell92e83bf2015-05-07 11:25:03 -04003321 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04003322 // if input >= (double)LONG_MAX goto done
Nick Desaulniers98e97c62019-10-18 14:25:19 -07003323 __ comisd(input, codegen_->LiteralDoubleAddress(
3324 static_cast<double>(kPrimLongMax)));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003325 __ j(kAboveEqual, &done);
3326 // if input == NaN goto nan
3327 __ j(kUnordered, &nan);
3328 // output = double-to-long-truncate(input)
3329 __ cvttsd2si(output, input, true);
3330 __ jmp(&done);
3331 __ Bind(&nan);
3332 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04003333 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003334 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00003335 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003336 }
Roland Levillaindff1f282014-11-05 14:15:05 +00003337
3338 default:
3339 LOG(FATAL) << "Unexpected type conversion from " << input_type
3340 << " to " << result_type;
3341 }
3342 break;
3343
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003344 case DataType::Type::kFloat32:
Roland Levillaincff13742014-11-17 14:32:17 +00003345 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003346 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003347 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003348 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003349 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003350 case DataType::Type::kInt16:
3351 case DataType::Type::kInt32:
Mark Mendell40741f32015-04-20 22:10:34 -04003352 if (in.IsRegister()) {
3353 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
3354 } else if (in.IsConstant()) {
3355 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
3356 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003357 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003358 } else {
3359 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
3360 Address(CpuRegister(RSP), in.GetStackIndex()), false);
3361 }
Roland Levillaincff13742014-11-17 14:32:17 +00003362 break;
3363
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003364 case DataType::Type::kInt64:
Mark Mendell40741f32015-04-20 22:10:34 -04003365 if (in.IsRegister()) {
3366 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
3367 } else if (in.IsConstant()) {
3368 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
3369 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Pavel Vyssotski4c858cd2016-03-16 13:59:53 +06003370 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003371 } else {
3372 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
3373 Address(CpuRegister(RSP), in.GetStackIndex()), true);
3374 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00003375 break;
3376
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003377 case DataType::Type::kFloat64:
Mark Mendell40741f32015-04-20 22:10:34 -04003378 if (in.IsFpuRegister()) {
3379 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
3380 } else if (in.IsConstant()) {
3381 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
3382 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003383 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003384 } else {
3385 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
3386 Address(CpuRegister(RSP), in.GetStackIndex()));
3387 }
Roland Levillaincff13742014-11-17 14:32:17 +00003388 break;
3389
3390 default:
3391 LOG(FATAL) << "Unexpected type conversion from " << input_type
3392 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08003393 }
Roland Levillaincff13742014-11-17 14:32:17 +00003394 break;
3395
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003396 case DataType::Type::kFloat64:
Roland Levillaincff13742014-11-17 14:32:17 +00003397 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003398 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003399 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003400 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003401 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003402 case DataType::Type::kInt16:
3403 case DataType::Type::kInt32:
Mark Mendell40741f32015-04-20 22:10:34 -04003404 if (in.IsRegister()) {
3405 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
3406 } else if (in.IsConstant()) {
3407 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
3408 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003409 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003410 } else {
3411 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
3412 Address(CpuRegister(RSP), in.GetStackIndex()), false);
3413 }
Roland Levillaincff13742014-11-17 14:32:17 +00003414 break;
3415
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003416 case DataType::Type::kInt64:
Mark Mendell40741f32015-04-20 22:10:34 -04003417 if (in.IsRegister()) {
3418 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
3419 } else if (in.IsConstant()) {
3420 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
3421 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003422 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003423 } else {
3424 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
3425 Address(CpuRegister(RSP), in.GetStackIndex()), true);
3426 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00003427 break;
3428
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003429 case DataType::Type::kFloat32:
Mark Mendell40741f32015-04-20 22:10:34 -04003430 if (in.IsFpuRegister()) {
3431 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
3432 } else if (in.IsConstant()) {
3433 float v = in.GetConstant()->AsFloatConstant()->GetValue();
3434 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003435 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003436 } else {
3437 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
3438 Address(CpuRegister(RSP), in.GetStackIndex()));
3439 }
Roland Levillaincff13742014-11-17 14:32:17 +00003440 break;
3441
3442 default:
3443 LOG(FATAL) << "Unexpected type conversion from " << input_type
3444 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08003445 }
Roland Levillaindff1f282014-11-05 14:15:05 +00003446 break;
3447
3448 default:
3449 LOG(FATAL) << "Unexpected type conversion from " << input_type
3450 << " to " << result_type;
3451 }
3452}
3453
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003454void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003455 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003456 new (GetGraph()->GetAllocator()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003457 switch (add->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003458 case DataType::Type::kInt32: {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003459 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003460 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
3461 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003462 break;
3463 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003464
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003465 case DataType::Type::kInt64: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003466 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05003467 // We can use a leaq or addq if the constant can fit in an immediate.
Mark Mendellea5af682015-10-22 17:35:49 -04003468 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05003469 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003470 break;
3471 }
3472
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003473 case DataType::Type::kFloat64:
3474 case DataType::Type::kFloat32: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003475 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003476 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003477 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003478 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003479 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003480
3481 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003482 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003483 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003484}
3485
// Emits the addition selected by VisitAdd's locations. Integer adds use the
// two-operand addl/addq when the output aliases an input, and otherwise fall
// back to leal/leaq, which acts as a non-destructive three-operand add.
void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  switch (add->GetResultType()) {
    case DataType::Type::kInt32: {
      if (second.IsRegister()) {
        // Compare the raw register codes to detect aliasing between the
        // output and either input.
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          // Addition is commutative, so add the first input into the output.
          __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          // All three registers are distinct: leal computes first + second
          // without clobbering either input.
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else if (second.IsConstant()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addl(out.AsRegister<CpuRegister>(),
                  Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          // out = first + constant, leaving `first` untouched.
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
        }
      } else {
        // Stack operand: only the destructive two-operand form exists, so the
        // locations must have made the output alias the first input.
        DCHECK(first.Equals(locations->Out()));
        __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kInt64: {
      if (second.IsRegister()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else {
        // The locations constrained the constant to fit in 32 bits
        // (RegisterOrInt32Constant); the DCHECK_EQ below verifies that.
        DCHECK(second.IsConstant());
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        int32_t int32_value = Low32Bits(value);
        DCHECK_EQ(int32_value, value);
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), int32_value));
        }
      }
      break;
    }

    case DataType::Type::kFloat32: {
      // FP adds are two-operand; the locations force out == first.
      if (second.IsFpuRegister()) {
        __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // The constant is loaded from an in-memory literal.
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat64: {
      if (second.IsFpuRegister()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
3577
3578void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003579 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003580 new (GetGraph()->GetAllocator()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003581 switch (sub->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003582 case DataType::Type::kInt32: {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003583 locations->SetInAt(0, Location::RequiresRegister());
3584 locations->SetInAt(1, Location::Any());
3585 locations->SetOut(Location::SameAsFirstInput());
3586 break;
3587 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003588 case DataType::Type::kInt64: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003589 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04003590 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003591 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003592 break;
3593 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003594 case DataType::Type::kFloat32:
3595 case DataType::Type::kFloat64: {
Calin Juravle11351682014-10-23 15:38:15 +01003596 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003597 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003598 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003599 break;
Calin Juravle11351682014-10-23 15:38:15 +01003600 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003601 default:
Calin Juravle11351682014-10-23 15:38:15 +01003602 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003603 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003604}
3605
3606void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
3607 LocationSummary* locations = sub->GetLocations();
Calin Juravle11351682014-10-23 15:38:15 +01003608 Location first = locations->InAt(0);
3609 Location second = locations->InAt(1);
3610 DCHECK(first.Equals(locations->Out()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003611 switch (sub->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003612 case DataType::Type::kInt32: {
Calin Juravle11351682014-10-23 15:38:15 +01003613 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003614 __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle11351682014-10-23 15:38:15 +01003615 } else if (second.IsConstant()) {
3616 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
Roland Levillain271ab9c2014-11-27 15:23:57 +00003617 __ subl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003618 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003619 __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003620 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003621 break;
3622 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003623 case DataType::Type::kInt64: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003624 if (second.IsConstant()) {
3625 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3626 DCHECK(IsInt<32>(value));
3627 __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
3628 } else {
3629 __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
3630 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003631 break;
3632 }
3633
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003634 case DataType::Type::kFloat32: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003635 if (second.IsFpuRegister()) {
3636 __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3637 } else if (second.IsConstant()) {
3638 __ subss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003639 codegen_->LiteralFloatAddress(
3640 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003641 } else {
3642 DCHECK(second.IsStackSlot());
3643 __ subss(first.AsFpuRegister<XmmRegister>(),
3644 Address(CpuRegister(RSP), second.GetStackIndex()));
3645 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003646 break;
Calin Juravle11351682014-10-23 15:38:15 +01003647 }
3648
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003649 case DataType::Type::kFloat64: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003650 if (second.IsFpuRegister()) {
3651 __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3652 } else if (second.IsConstant()) {
3653 __ subsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003654 codegen_->LiteralDoubleAddress(
3655 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003656 } else {
3657 DCHECK(second.IsDoubleStackSlot());
3658 __ subsd(first.AsFpuRegister<XmmRegister>(),
3659 Address(CpuRegister(RSP), second.GetStackIndex()));
3660 }
Calin Juravle11351682014-10-23 15:38:15 +01003661 break;
3662 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003663
3664 default:
Calin Juravle11351682014-10-23 15:38:15 +01003665 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003666 }
3667}
3668
Calin Juravle34bacdf2014-10-07 20:23:36 +01003669void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3670 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003671 new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
Calin Juravle34bacdf2014-10-07 20:23:36 +01003672 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003673 case DataType::Type::kInt32: {
Calin Juravle34bacdf2014-10-07 20:23:36 +01003674 locations->SetInAt(0, Location::RequiresRegister());
3675 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003676 if (mul->InputAt(1)->IsIntConstant()) {
3677 // Can use 3 operand multiply.
3678 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3679 } else {
3680 locations->SetOut(Location::SameAsFirstInput());
3681 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003682 break;
3683 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003684 case DataType::Type::kInt64: {
Calin Juravle34bacdf2014-10-07 20:23:36 +01003685 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003686 locations->SetInAt(1, Location::Any());
3687 if (mul->InputAt(1)->IsLongConstant() &&
3688 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003689 // Can use 3 operand multiply.
3690 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3691 } else {
3692 locations->SetOut(Location::SameAsFirstInput());
3693 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003694 break;
3695 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003696 case DataType::Type::kFloat32:
3697 case DataType::Type::kFloat64: {
Calin Juravleb5bfa962014-10-21 18:02:24 +01003698 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003699 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003700 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003701 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003702 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003703
3704 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003705 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003706 }
3707}
3708
// Emits the multiplication selected by VisitMul's locations. Multiplies by a
// constant use the three-operand imul form (out may differ from first); the
// register and memory forms are two-operand and require out == first.
void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();
  switch (mul->GetResultType()) {
    case DataType::Type::kInt32:
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsIntConstant()) {
        Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
        __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(first.Equals(out));
        DCHECK(second.IsStackSlot());
        __ imull(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    case DataType::Type::kInt64: {
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsLongConstant()) {
        int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
        if (IsInt<32>(value)) {
          // imulq's immediate form only accepts a 32-bit value.
          __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
                   Immediate(static_cast<int32_t>(value)));
        } else {
          // Have to use the constant area.
          DCHECK(first.Equals(out));
          __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(second.IsDoubleStackSlot());
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat32: {
      // FP multiplies are two-operand; the locations force out == first.
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // The constant is loaded from an in-memory literal.
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat64: {
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
3792
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003793void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3794 uint32_t stack_adjustment, bool is_float) {
3795 if (source.IsStackSlot()) {
3796 DCHECK(is_float);
3797 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3798 } else if (source.IsDoubleStackSlot()) {
3799 DCHECK(!is_float);
3800 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3801 } else {
3802 // Write the value to the temporary location on the stack and load to FP stack.
3803 if (is_float) {
3804 Location stack_temp = Location::StackSlot(temp_offset);
3805 codegen_->Move(stack_temp, source);
3806 __ flds(Address(CpuRegister(RSP), temp_offset));
3807 } else {
3808 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3809 codegen_->Move(stack_temp, source);
3810 __ fldl(Address(CpuRegister(RSP), temp_offset));
3811 }
3812 }
3813}
3814
// Computes a float/double remainder (HRem) via the x87 fprem instruction,
// which provides the truncated-division remainder semantics of Java's `%`.
// Operands are staged through stack slots onto the x87 stack, fprem is
// iterated until the reduction is complete, and the result is moved back
// into the XMM output register.
void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
  DataType::Type type = rem->GetResultType();
  bool is_float = type == DataType::Type::kFloat32;
  size_t elem_size = DataType::Size(type);
  LocationSummary* locations = rem->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  // Create stack space for 2 elements.
  // TODO: enhance register allocator to ask for stack temporaries.
  __ subq(CpuRegister(RSP), Immediate(2 * elem_size));

  // Load the values to the FP stack in reverse order, using temporaries if needed.
  // fprem operates on ST(0) (dividend) and ST(1) (divisor), so the divisor is
  // pushed first, leaving the dividend on top.
  PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
  PushOntoFPStack(first, 0, 2 * elem_size, is_float);

  // Loop doing FPREM until we stabilize: fprem may only perform a partial
  // argument reduction per iteration.
  NearLabel retry;
  __ Bind(&retry);
  __ fprem();

  // Move FP status to AX.
  __ fstsw();

  // And see if the argument reduction is complete. This is signaled by the
  // C2 FPU flag bit set to 0.
  __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
  __ j(kNotEqual, &retry);

  // We have settled on the final value. Retrieve it into an XMM register.
  // Store FP top of stack to real stack.
  if (is_float) {
    __ fsts(Address(CpuRegister(RSP), 0));
  } else {
    __ fstl(Address(CpuRegister(RSP), 0));
  }

  // Pop the 2 items from the FP stack.
  __ fucompp();

  // Load the value from the stack into an XMM register.
  DCHECK(out.IsFpuRegister()) << out;
  if (is_float) {
    __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  } else {
    __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  }

  // And remove the temporary stack space we allocated.
  __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
}
3867
// Generates code for an integer division or remainder whose divisor is the
// constant 1 or -1. Division by 1 is a plain move, division by -1 is a
// negation; a remainder by +/-1 is always 0, so the output is simply zeroed.
void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
  CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());

  DCHECK(imm == 1 || imm == -1);

  switch (instruction->GetResultType()) {
    case DataType::Type::kInt32: {
      if (instruction->IsRem()) {
        // x % 1 == x % -1 == 0.
        __ xorl(output_register, output_register);
      } else {
        __ movl(output_register, input_register);
        if (imm == -1) {
          // x / -1 == -x.
          __ negl(output_register);
        }
      }
      break;
    }

    case DataType::Type::kInt64: {
      if (instruction->IsRem()) {
        // x % 1 == x % -1 == 0. The 32-bit xor also zero-extends into the
        // upper half of the 64-bit register.
        __ xorl(output_register, output_register);
      } else {
        __ movq(output_register, input_register);
        if (imm == -1) {
          __ negq(output_register);
        }
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
  }
}
// Generates code for a remainder where |divisor| is a power of two.
// Computes numerator & (abs_imm - 1); if the result is non-zero and the
// numerator is negative, the result is adjusted so that it carries the sign
// of the numerator (truncated-division remainder semantics).
void InstructionCodeGeneratorX86_64::RemByPowerOfTwo(HRem* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
  uint64_t abs_imm = AbsOrMin(imm);
  CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();
  if (instruction->GetResultType() == DataType::Type::kInt32) {
    NearLabel done;
    // out = numerator & (abs_imm - 1): the magnitude of the remainder.
    __ movl(out, numerator);
    __ andl(out, Immediate(abs_imm-1));
    // A zero remainder needs no sign fixup.
    __ j(Condition::kZero, &done);
    // tmp = out - abs_imm (lea with displacement ~(abs_imm-1) == -abs_imm),
    // i.e. the negative-numerator form of the remainder.
    __ leal(tmp, Address(out, static_cast<int32_t>(~(abs_imm-1))));
    __ testl(numerator, numerator);
    // Pick the negative form when the numerator is negative.
    __ cmov(Condition::kLess, out, tmp, false);
    __ Bind(&done);

  } else {
    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
    // The 64-bit mask may not fit in an immediate; materialize it in tmp.
    codegen_->Load64BitValue(tmp, abs_imm - 1);
    NearLabel done;

    __ movq(out, numerator);
    __ andq(out, tmp);
    __ j(Condition::kZero, &done);
    // tmp = sign(numerator) << log2(abs_imm): all-ones above the mask when
    // the numerator is negative, zero otherwise. Or-ing it in turns the
    // positive remainder magnitude into out - abs_imm.
    __ movq(tmp, numerator);
    __ sarq(tmp, Immediate(63));
    __ shlq(tmp, Immediate(WhichPowerOf2(abs_imm)));
    __ orq(out, tmp);
    __ Bind(&done);
  }
}
// Generates code for a division where |divisor| is a power of two.
// Uses the standard bias trick: add (abs_imm - 1) to negative numerators so
// that the arithmetic right shift rounds toward zero, then negate if the
// divisor was negative.
void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
  CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();

  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
  uint64_t abs_imm = AbsOrMin(imm);

  CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();

  if (instruction->GetResultType() == DataType::Type::kInt32) {
    // When denominator is equal to 2, we can add signed bit and numerator to tmp.
    // Below we are using addl instruction instead of cmov which give us 1 cycle benefit.
    if (abs_imm == 2) {
      // tmp = (numerator >> 31) + numerator, i.e. bias by 1 iff negative.
      __ leal(tmp, Address(numerator, 0));
      __ shrl(tmp, Immediate(31));
      __ addl(tmp, numerator);
    } else {
      // tmp = numerator + (abs_imm - 1), kept only for negative numerators.
      __ leal(tmp, Address(numerator, abs_imm - 1));
      __ testl(numerator, numerator);
      __ cmov(kGreaterEqual, tmp, numerator);
    }
    int shift = CTZ(imm);
    __ sarl(tmp, Immediate(shift));

    if (imm < 0) {
      __ negl(tmp);
    }

    __ movl(output_register, tmp);
  } else {
    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
    CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();
    if (abs_imm == 2) {
      // rdx = (numerator >> 63) + numerator, i.e. bias by 1 iff negative.
      __ movq(rdx, numerator);
      __ shrq(rdx, Immediate(63));
      __ addq(rdx, numerator);
    } else {
      // rdx = numerator + (abs_imm - 1), kept only for negative numerators.
      codegen_->Load64BitValue(rdx, abs_imm - 1);
      __ addq(rdx, numerator);
      __ testq(numerator, numerator);
      __ cmov(kGreaterEqual, rdx, numerator);
    }
    int shift = CTZ(imm);
    __ sarq(rdx, Immediate(shift));

    if (imm < 0) {
      __ negq(rdx);
    }

    __ movq(output_register, rdx);
  }
}
4000
// Generates code for a division or remainder by an arbitrary non-trivial
// constant, using the multiply-by-magic-number technique: a high multiply by
// a precomputed "magic" reciprocal followed by correction adds/shifts.
// Register allocation has pinned the dividend to RAX and reserved RDX
// (see VisitDiv/VisitRem), matching the imul RDX:RAX convention.
void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  // The numerator-save temp is the extra temp requested for constant divisors.
  CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
                                               : locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
                                         : locations->Out().AsRegister<CpuRegister>();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();

  DCHECK_EQ(RAX, eax.AsRegister());
  DCHECK_EQ(RDX, edx.AsRegister());
  if (instruction->IsDiv()) {
    DCHECK_EQ(RAX, out.AsRegister());
  } else {
    DCHECK_EQ(RDX, out.AsRegister());
  }

  int64_t magic;
  int shift;

  // TODO: can these branches be written as one?
  if (instruction->GetResultType() == DataType::Type::kInt32) {
    int imm = second.GetConstant()->AsIntConstant()->GetValue();

    CalculateMagicAndShiftForDivRem(imm, false /* is_long= */, &magic, &shift);

    // Save the numerator: imull clobbers EAX and EDX.
    __ movl(numerator, eax);

    // EDX:EAX = magic * numerator.
    __ movl(eax, Immediate(magic));
    __ imull(numerator);

    // Correction terms required when the magic constant's sign disagrees
    // with the divisor's sign.
    if (imm > 0 && magic < 0) {
      __ addl(edx, numerator);
    } else if (imm < 0 && magic > 0) {
      __ subl(edx, numerator);
    }

    if (shift != 0) {
      __ sarl(edx, Immediate(shift));
    }

    // EDX += 1 if EDX < 0 (round the quotient toward zero).
    __ movl(eax, edx);
    __ shrl(edx, Immediate(31));
    __ addl(edx, eax);

    if (instruction->IsRem()) {
      // remainder = numerator - quotient * imm, delivered in EDX.
      __ movl(eax, numerator);
      __ imull(edx, Immediate(imm));
      __ subl(eax, edx);
      __ movl(edx, eax);
    } else {
      __ movl(eax, edx);
    }
  } else {
    int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();

    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);

    CpuRegister rax = eax;
    CpuRegister rdx = edx;

    CalculateMagicAndShiftForDivRem(imm, true /* is_long= */, &magic, &shift);

    // Save the numerator.
    __ movq(numerator, rax);

    // RAX = magic
    codegen_->Load64BitValue(rax, magic);

    // RDX:RAX = magic * numerator
    __ imulq(numerator);

    if (imm > 0 && magic < 0) {
      // RDX += numerator
      __ addq(rdx, numerator);
    } else if (imm < 0 && magic > 0) {
      // RDX -= numerator
      __ subq(rdx, numerator);
    }

    // Shift if needed.
    if (shift != 0) {
      __ sarq(rdx, Immediate(shift));
    }

    // RDX += 1 if RDX < 0
    __ movq(rax, rdx);
    __ shrq(rdx, Immediate(63));
    __ addq(rdx, rax);

    if (instruction->IsRem()) {
      // remainder = numerator - quotient * imm, delivered in RDX.
      __ movq(rax, numerator);

      // 64-bit imul has no 64-bit immediate form; fall back to a literal
      // pool load when the divisor does not fit in 32 bits.
      if (IsInt<32>(imm)) {
        __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
      } else {
        __ imulq(rdx, codegen_->LiteralInt64Address(imm));
      }

      __ subq(rax, rdx);
      __ movq(rdx, rax);
    } else {
      __ movq(rax, rdx);
    }
  }
}
4111
// Generates an integer division or remainder. Constant divisors are strength
// reduced (nothing for 0, move/negate for +/-1, shifts for powers of two,
// magic-number multiply otherwise); non-constant divisors use idiv with a
// slow path guarding the MIN / -1 overflow case.
void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DataType::Type type = instruction->GetResultType();
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);

  bool is_div = instruction->IsDiv();
  LocationSummary* locations = instruction->GetLocations();

  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  // Register allocation pinned the dividend to RAX and the result to
  // RAX (quotient) or RDX (remainder), matching the idiv convention.
  DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
  DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());

  if (second.IsConstant()) {
    int64_t imm = Int64FromConstant(second.GetConstant());

    if (imm == 0) {
      // Do not generate anything. DivZeroCheck would prevent any code to be executed.
    } else if (imm == 1 || imm == -1) {
      DivRemOneOrMinusOne(instruction);
    } else if (IsPowerOfTwo(AbsOrMin(imm))) {
      if (is_div) {
        DivByPowerOfTwo(instruction->AsDiv());
      } else {
        RemByPowerOfTwo(instruction->AsRem());
      }
    } else {
      DCHECK(imm <= -2 || imm >= 2);
      GenerateDivRemWithAnyConstant(instruction);
    }
  } else {
    SlowPathCode* slow_path =
        new (codegen_->GetScopedAllocator()) DivRemMinusOneSlowPathX86_64(
            instruction, out.AsRegister(), type, is_div);
    codegen_->AddSlowPath(slow_path);

    CpuRegister second_reg = second.AsRegister<CpuRegister>();
    // 0x80000000(00000000)/-1 triggers an arithmetic exception!
    // Dividing by -1 is actually negation and -0x800000000(00000000) = 0x80000000(00000000)
    // so it's safe to just use negl instead of more complex comparisons.
    if (type == DataType::Type::kInt32) {
      __ cmpl(second_reg, Immediate(-1));
      __ j(kEqual, slow_path->GetEntryLabel());
      // edx:eax <- sign-extended of eax
      __ cdq();
      // eax = quotient, edx = remainder
      __ idivl(second_reg);
    } else {
      __ cmpq(second_reg, Immediate(-1));
      __ j(kEqual, slow_path->GetEntryLabel());
      // rdx:rax <- sign-extended of rax
      __ cqo();
      // rax = quotient, rdx = remainder
      __ idivq(second_reg);
    }
    __ Bind(slow_path->GetExitLabel());
  }
}
4171
Calin Juravle7c4954d2014-10-28 16:57:40 +00004172void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
4173 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004174 new (GetGraph()->GetAllocator()) LocationSummary(div, LocationSummary::kNoCall);
Calin Juravle7c4954d2014-10-28 16:57:40 +00004175 switch (div->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004176 case DataType::Type::kInt32:
4177 case DataType::Type::kInt64: {
Calin Juravled0d48522014-11-04 16:40:20 +00004178 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01004179 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00004180 locations->SetOut(Location::SameAsFirstInput());
4181 // Intel uses edx:eax as the dividend.
4182 locations->AddTemp(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01004183 // We need to save the numerator while we tweak rax and rdx. As we are using imul in a way
4184 // which enforces results to be in RAX and RDX, things are simpler if we use RDX also as
4185 // output and request another temp.
4186 if (div->InputAt(1)->IsConstant()) {
4187 locations->AddTemp(Location::RequiresRegister());
4188 }
Calin Juravled0d48522014-11-04 16:40:20 +00004189 break;
4190 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004191
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004192 case DataType::Type::kFloat32:
4193 case DataType::Type::kFloat64: {
Calin Juravle7c4954d2014-10-28 16:57:40 +00004194 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04004195 locations->SetInAt(1, Location::Any());
Calin Juravle7c4954d2014-10-28 16:57:40 +00004196 locations->SetOut(Location::SameAsFirstInput());
4197 break;
4198 }
4199
4200 default:
4201 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
4202 }
4203}
4204
// Generates code for HDiv. Integer types are delegated to the shared
// div/rem lowering; FP types emit a single divss/divsd whose second operand
// may be a register, a constant in the literal pool, or a stack slot.
void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
  LocationSummary* locations = div->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  // The output was constrained to alias the first input.
  DCHECK(first.Equals(locations->Out()));

  DataType::Type type = div->GetResultType();
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GenerateDivRemIntegral(div);
      break;
    }

    case DataType::Type::kFloat32: {
      if (second.IsFpuRegister()) {
        __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant divisor: load from the constant area (literal pool).
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat64: {
      if (second.IsFpuRegister()) {
        __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant divisor: load from the constant area (literal pool).
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
4253
Calin Juravlebacfec32014-11-14 15:54:36 +00004254void LocationsBuilderX86_64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004255 DataType::Type type = rem->GetResultType();
Mark Mendell24f2dfa2015-01-14 19:51:45 -05004256 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004257 new (GetGraph()->GetAllocator()) LocationSummary(rem, LocationSummary::kNoCall);
Calin Juravled2ec87d2014-12-08 14:24:46 +00004258
4259 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004260 case DataType::Type::kInt32:
4261 case DataType::Type::kInt64: {
Calin Juravlebacfec32014-11-14 15:54:36 +00004262 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01004263 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00004264 // Intel uses rdx:rax as the dividend and puts the remainder in rdx
4265 locations->SetOut(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01004266 // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
4267 // which enforces results to be in RAX and RDX, things are simpler if we use EAX also as
4268 // output and request another temp.
4269 if (rem->InputAt(1)->IsConstant()) {
4270 locations->AddTemp(Location::RequiresRegister());
4271 }
Calin Juravlebacfec32014-11-14 15:54:36 +00004272 break;
4273 }
4274
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004275 case DataType::Type::kFloat32:
4276 case DataType::Type::kFloat64: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05004277 locations->SetInAt(0, Location::Any());
4278 locations->SetInAt(1, Location::Any());
4279 locations->SetOut(Location::RequiresFpuRegister());
4280 locations->AddTemp(Location::RegisterLocation(RAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00004281 break;
4282 }
4283
4284 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00004285 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00004286 }
4287}
4288
4289void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004290 DataType::Type type = rem->GetResultType();
Calin Juravlebacfec32014-11-14 15:54:36 +00004291 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004292 case DataType::Type::kInt32:
4293 case DataType::Type::kInt64: {
Calin Juravlebacfec32014-11-14 15:54:36 +00004294 GenerateDivRemIntegral(rem);
4295 break;
4296 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004297 case DataType::Type::kFloat32:
4298 case DataType::Type::kFloat64: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05004299 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00004300 break;
4301 }
Calin Juravlebacfec32014-11-14 15:54:36 +00004302 default:
4303 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
4304 }
4305}
4306
Aart Bik1f8d51b2018-02-15 10:42:37 -08004307static void CreateMinMaxLocations(ArenaAllocator* allocator, HBinaryOperation* minmax) {
4308 LocationSummary* locations = new (allocator) LocationSummary(minmax);
4309 switch (minmax->GetResultType()) {
4310 case DataType::Type::kInt32:
4311 case DataType::Type::kInt64:
4312 locations->SetInAt(0, Location::RequiresRegister());
4313 locations->SetInAt(1, Location::RequiresRegister());
4314 locations->SetOut(Location::SameAsFirstInput());
4315 break;
4316 case DataType::Type::kFloat32:
4317 case DataType::Type::kFloat64:
4318 locations->SetInAt(0, Location::RequiresFpuRegister());
4319 locations->SetInAt(1, Location::RequiresFpuRegister());
4320 // The following is sub-optimal, but all we can do for now. It would be fine to also accept
4321 // the second input to be the output (we can simply swap inputs).
4322 locations->SetOut(Location::SameAsFirstInput());
4323 break;
4324 default:
4325 LOG(FATAL) << "Unexpected type for HMinMax " << minmax->GetResultType();
4326 }
4327}
4328
Aart Bik351df3e2018-03-07 11:54:57 -08004329void InstructionCodeGeneratorX86_64::GenerateMinMaxInt(LocationSummary* locations,
4330 bool is_min,
4331 DataType::Type type) {
Aart Bik1f8d51b2018-02-15 10:42:37 -08004332 Location op1_loc = locations->InAt(0);
4333 Location op2_loc = locations->InAt(1);
4334
4335 // Shortcut for same input locations.
4336 if (op1_loc.Equals(op2_loc)) {
4337 // Can return immediately, as op1_loc == out_loc.
4338 // Note: if we ever support separate registers, e.g., output into memory, we need to check for
4339 // a copy here.
4340 DCHECK(locations->Out().Equals(op1_loc));
4341 return;
4342 }
4343
4344 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
4345 CpuRegister op2 = op2_loc.AsRegister<CpuRegister>();
4346
4347 // (out := op1)
4348 // out <=? op2
4349 // if out is min jmp done
4350 // out := op2
4351 // done:
4352
4353 if (type == DataType::Type::kInt64) {
4354 __ cmpq(out, op2);
4355 __ cmov(is_min ? Condition::kGreater : Condition::kLess, out, op2, /*is64bit*/ true);
4356 } else {
4357 DCHECK_EQ(type, DataType::Type::kInt32);
4358 __ cmpl(out, op2);
4359 __ cmov(is_min ? Condition::kGreater : Condition::kLess, out, op2, /*is64bit*/ false);
4360 }
4361}
4362
// Generates a floating-point min/max with correct NaN and signed-zero
// handling: any NaN input produces the canonical NaN, and -0.0/+0.0 are
// resolved with a bitwise or (min) or and (max) of the two operands.
void InstructionCodeGeneratorX86_64::GenerateMinMaxFP(LocationSummary* locations,
                                                      bool is_min,
                                                      DataType::Type type) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);
  Location out_loc = locations->Out();
  XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();

  // Shortcut for same input locations.
  if (op1_loc.Equals(op2_loc)) {
    DCHECK(out_loc.Equals(op1_loc));
    return;
  }

  // (out := op1)
  // out <=? op2
  // if Nan jmp Nan_label
  // if out is min jmp done
  // if op2 is min jmp op2_label
  // handle -0/+0
  // jmp done
  // Nan_label:
  // out := NaN
  // op2_label:
  // out := op2
  // done:
  //
  // This removes one jmp, but needs to copy one input (op1) to out.
  //
  // TODO: This is straight from Quick. Make NaN an out-of-line slowpath?

  XmmRegister op2 = op2_loc.AsFpuRegister<XmmRegister>();

  NearLabel nan, done, op2_label;
  if (type == DataType::Type::kFloat64) {
    __ ucomisd(out, op2);
  } else {
    DCHECK_EQ(type, DataType::Type::kFloat32);
    __ ucomiss(out, op2);
  }

  // ucomis sets the parity flag for unordered operands, i.e. when either
  // input is NaN.
  __ j(Condition::kParityEven, &nan);

  __ j(is_min ? Condition::kAbove : Condition::kBelow, &op2_label);
  __ j(is_min ? Condition::kBelow : Condition::kAbove, &done);

  // Handle 0.0/-0.0.
  if (is_min) {
    // or-ing keeps the sign bit if either operand is -0.0.
    if (type == DataType::Type::kFloat64) {
      __ orpd(out, op2);
    } else {
      __ orps(out, op2);
    }
  } else {
    // and-ing clears the sign bit unless both operands are -0.0.
    if (type == DataType::Type::kFloat64) {
      __ andpd(out, op2);
    } else {
      __ andps(out, op2);
    }
  }
  __ jmp(&done);

  // NaN handling: load the canonical NaN bit pattern.
  __ Bind(&nan);
  if (type == DataType::Type::kFloat64) {
    __ movsd(out, codegen_->LiteralInt64Address(INT64_C(0x7FF8000000000000)));
  } else {
    __ movss(out, codegen_->LiteralInt32Address(INT32_C(0x7FC00000)));
  }
  __ jmp(&done);

  // out := op2;
  __ Bind(&op2_label);
  if (type == DataType::Type::kFloat64) {
    __ movsd(out, op2);
  } else {
    __ movss(out, op2);
  }

  // Done.
  __ Bind(&done);
}
4445
Aart Bik351df3e2018-03-07 11:54:57 -08004446void InstructionCodeGeneratorX86_64::GenerateMinMax(HBinaryOperation* minmax, bool is_min) {
4447 DataType::Type type = minmax->GetResultType();
4448 switch (type) {
4449 case DataType::Type::kInt32:
4450 case DataType::Type::kInt64:
4451 GenerateMinMaxInt(minmax->GetLocations(), is_min, type);
4452 break;
4453 case DataType::Type::kFloat32:
4454 case DataType::Type::kFloat64:
4455 GenerateMinMaxFP(minmax->GetLocations(), is_min, type);
4456 break;
4457 default:
4458 LOG(FATAL) << "Unexpected type for HMinMax " << type;
4459 }
4460}
4461
// HMin uses the same register constraints as HMax.
void LocationsBuilderX86_64::VisitMin(HMin* min) {
  CreateMinMaxLocations(GetGraph()->GetAllocator(), min);
}
4465
// HMin shares its code generation with HMax, parameterized by is_min.
void InstructionCodeGeneratorX86_64::VisitMin(HMin* min) {
  GenerateMinMax(min, /*is_min*/ true);
}
4469
// HMax uses the same register constraints as HMin.
void LocationsBuilderX86_64::VisitMax(HMax* max) {
  CreateMinMaxLocations(GetGraph()->GetAllocator(), max);
}
4473
// HMax shares its code generation with HMin, parameterized by is_min.
void InstructionCodeGeneratorX86_64::VisitMax(HMax* max) {
  GenerateMinMax(max, /*is_min*/ false);
}
4477
Aart Bik3dad3412018-02-28 12:01:46 -08004478void LocationsBuilderX86_64::VisitAbs(HAbs* abs) {
4479 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(abs);
4480 switch (abs->GetResultType()) {
4481 case DataType::Type::kInt32:
4482 case DataType::Type::kInt64:
4483 locations->SetInAt(0, Location::RequiresRegister());
4484 locations->SetOut(Location::SameAsFirstInput());
4485 locations->AddTemp(Location::RequiresRegister());
4486 break;
4487 case DataType::Type::kFloat32:
4488 case DataType::Type::kFloat64:
4489 locations->SetInAt(0, Location::RequiresFpuRegister());
4490 locations->SetOut(Location::SameAsFirstInput());
4491 locations->AddTemp(Location::RequiresFpuRegister());
4492 break;
4493 default:
4494 LOG(FATAL) << "Unexpected type for HAbs " << abs->GetResultType();
4495 }
4496}
4497
// Generates code for HAbs. Integer abs uses the branch-free
// (x + mask) ^ mask idiom where mask is x's sign spread across all bits;
// FP abs clears the sign bit by and-ing with a literal-pool mask.
void InstructionCodeGeneratorX86_64::VisitAbs(HAbs* abs) {
  LocationSummary* locations = abs->GetLocations();
  switch (abs->GetResultType()) {
    case DataType::Type::kInt32: {
      CpuRegister out = locations->Out().AsRegister<CpuRegister>();
      CpuRegister mask = locations->GetTemp(0).AsRegister<CpuRegister>();
      // Create mask: all ones if out < 0, all zeroes otherwise.
      __ movl(mask, out);
      __ sarl(mask, Immediate(31));
      // Add mask, then xor: identity for non-negative, negation otherwise.
      __ addl(out, mask);
      __ xorl(out, mask);
      break;
    }
    case DataType::Type::kInt64: {
      CpuRegister out = locations->Out().AsRegister<CpuRegister>();
      CpuRegister mask = locations->GetTemp(0).AsRegister<CpuRegister>();
      // Create mask: all ones if out < 0, all zeroes otherwise.
      __ movq(mask, out);
      __ sarq(mask, Immediate(63));
      // Add mask, then xor: identity for non-negative, negation otherwise.
      __ addq(out, mask);
      __ xorq(out, mask);
      break;
    }
    case DataType::Type::kFloat32: {
      XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Clear the sign bit (bit 31).
      __ movss(mask, codegen_->LiteralInt32Address(INT32_C(0x7FFFFFFF)));
      __ andps(out, mask);
      break;
    }
    case DataType::Type::kFloat64: {
      XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Clear the sign bit (bit 63).
      __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x7FFFFFFFFFFFFFFF)));
      __ andpd(out, mask);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HAbs " << abs->GetResultType();
  }
}
4541
// Register constraints for HDivZeroCheck: the divisor may be in a register,
// on the stack, or a constant; the check itself produces no value.
void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::Any());
}
4546
// Generates the divide-by-zero check: compares the divisor against zero
// (wherever it lives — register, stack slot, or constant) and branches to
// the DivZeroCheck slow path when it is zero. A non-zero constant divisor
// emits no code at all.
void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCode* slow_path =
      new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathX86_64(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location value = locations->InAt(0);

  switch (instruction->GetType()) {
    // All sub-32-bit integral types are checked with 32-bit operations.
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      if (value.IsRegister()) {
        __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsStackSlot()) {
        __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        // A constant zero divisor always throws: jump unconditionally.
        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    case DataType::Type::kInt64: {
      if (value.IsRegister()) {
        __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsDoubleStackSlot()) {
        __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        // A constant zero divisor always throws: jump unconditionally.
        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
  }
}
4595
Calin Juravle9aec02f2014-11-18 23:06:35 +00004596void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
4597 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
4598
4599 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004600 new (GetGraph()->GetAllocator()) LocationSummary(op, LocationSummary::kNoCall);
Calin Juravle9aec02f2014-11-18 23:06:35 +00004601
4602 switch (op->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004603 case DataType::Type::kInt32:
4604 case DataType::Type::kInt64: {
Calin Juravle9aec02f2014-11-18 23:06:35 +00004605 locations->SetInAt(0, Location::RequiresRegister());
4606 // The shift count needs to be in CL.
4607 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
4608 locations->SetOut(Location::SameAsFirstInput());
4609 break;
4610 }
4611 default:
4612 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
4613 }
4614}
4615
// Generates code shared by HShl/HShr/HUShr: shl/sar/shr on the first input
// in place, with the count either in a register (constrained to CL by the
// locations builder) or as an immediate masked to the type's maximum
// distance, matching the language-level shift semantics.
void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  switch (op->GetResultType()) {
    case DataType::Type::kInt32: {
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        if (op->IsShl()) {
          __ shll(first_reg, second_reg);
        } else if (op->IsShr()) {
          // Shr is an arithmetic (sign-preserving) shift.
          __ sarl(first_reg, second_reg);
        } else {
          // UShr is a logical (zero-filling) shift.
          __ shrl(first_reg, second_reg);
        }
      } else {
        // Mask constant counts to [0, 31].
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
        if (op->IsShl()) {
          __ shll(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarl(first_reg, imm);
        } else {
          __ shrl(first_reg, imm);
        }
      }
      break;
    }
    case DataType::Type::kInt64: {
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        if (op->IsShl()) {
          __ shlq(first_reg, second_reg);
        } else if (op->IsShr()) {
          __ sarq(first_reg, second_reg);
        } else {
          __ shrq(first_reg, second_reg);
        }
      } else {
        // Mask constant counts to [0, 63].
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
        if (op->IsShl()) {
          __ shlq(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarq(first_reg, imm);
        } else {
          __ shrq(first_reg, imm);
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
      UNREACHABLE();
  }
}
4673
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004674void LocationsBuilderX86_64::VisitRor(HRor* ror) {
4675 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004676 new (GetGraph()->GetAllocator()) LocationSummary(ror, LocationSummary::kNoCall);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004677
4678 switch (ror->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004679 case DataType::Type::kInt32:
4680 case DataType::Type::kInt64: {
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004681 locations->SetInAt(0, Location::RequiresRegister());
4682 // The shift count needs to be in CL (unless it is a constant).
4683 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
4684 locations->SetOut(Location::SameAsFirstInput());
4685 break;
4686 }
4687 default:
4688 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
4689 UNREACHABLE();
4690 }
4691}
4692
// Emits ROR (rotate right) in place on the first input, with the count in CL
// or as an immediate masked to the maximum shift distance for the type.
void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
  LocationSummary* locations = ror->GetLocations();
  CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  switch (ror->GetResultType()) {
    case DataType::Type::kInt32:
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        __ rorl(first_reg, second_reg);
      } else {
        // Constant count, masked the same way the hardware masks CL.
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
        __ rorl(first_reg, imm);
      }
      break;
    case DataType::Type::kInt64:
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        __ rorq(first_reg, second_reg);
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
        __ rorq(first_reg, imm);
      }
      break;
    default:
      LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
      UNREACHABLE();
  }
}
4722
Calin Juravle9aec02f2014-11-18 23:06:35 +00004723void LocationsBuilderX86_64::VisitShl(HShl* shl) {
4724 HandleShift(shl);
4725}
4726
void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
  // Shared emitter selects SHL for this node kind.
  HandleShift(shl);
}
4730
void LocationsBuilderX86_64::VisitShr(HShr* shr) {
  // Shr shares the common shift constraints (count in CL or constant).
  HandleShift(shr);
}
4734
void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
  // Shared emitter selects SAR (arithmetic shift) for this node kind.
  HandleShift(shr);
}
4738
void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
  // UShr shares the common shift constraints (count in CL or constant).
  HandleShift(ushr);
}
4742
void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
  // Shared emitter selects SHR (logical shift) for this node kind.
  HandleShift(ushr);
}
4746
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004747void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004748 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
4749 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004750 InvokeRuntimeCallingConvention calling_convention;
Alex Lightd109e302018-06-27 10:25:41 -07004751 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01004752 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004753}
4754
// Calls the allocation entry point chosen for this instruction; the runtime
// returns the new object in RAX per the calling convention above.
void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
  codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
  DCHECK(!codegen_->IsLeafMethod());
}
4760
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004761void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004762 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
4763 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004764 InvokeRuntimeCallingConvention calling_convention;
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004765 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00004766 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
4767 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004768}
4769
// Emits the runtime call that allocates the array; the entry point is chosen
// per-instruction by the shared CodeGenerator helper.
void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care of poisoning the reference.
  QuickEntrypointEnum entrypoint = CodeGenerator::GetArrayAllocationEntrypoint(instruction);
  codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
  DCHECK(!codegen_->IsLeafMethod());
}
4777
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004778void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004779 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004780 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004781 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4782 if (location.IsStackSlot()) {
4783 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4784 } else if (location.IsDoubleStackSlot()) {
4785 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4786 }
4787 locations->SetOut(location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004788}
4789
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004790void InstructionCodeGeneratorX86_64::VisitParameterValue(
4791 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004792 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004793}
4794
// The current ArtMethod* is always held in the dedicated method register, so
// the output is pinned there and no move is ever needed.
void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}
4800
// No code needed: the method pointer already sits in the register the
// locations builder pinned above.
void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
    HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
4805
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004806void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
4807 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004808 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004809 locations->SetInAt(0, Location::RequiresRegister());
4810 locations->SetOut(Location::RequiresRegister());
4811}
4812
// Loads an ArtMethod* out of the class in input 0. For the vtable the entry
// is embedded in the Class object (single load); for the IMT the class only
// stores a pointer to the table, so two chained loads are required.
void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    // Embedded vtable: out = *(class + vtable_entry_offset).
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kX86_64PointerSize).SizeValue();
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
  } else {
    // IMT: first load the table pointer, then index into it.
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kX86_64PointerSize));
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(),
                    mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->Out().AsRegister<CpuRegister>(), method_offset));
  }
}
4830
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004831void LocationsBuilderX86_64::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004832 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004833 new (GetGraph()->GetAllocator()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00004834 locations->SetInAt(0, Location::RequiresRegister());
4835 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004836}
4837
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004838void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
4839 LocationSummary* locations = not_->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004840 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4841 locations->Out().AsRegister<CpuRegister>().AsRegister());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004842 Location out = locations->Out();
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004843 switch (not_->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004844 case DataType::Type::kInt32:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004845 __ notl(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004846 break;
4847
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004848 case DataType::Type::kInt64:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004849 __ notq(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004850 break;
4851
4852 default:
4853 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
4854 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004855}
4856
David Brazdil66d126e2015-04-03 16:02:44 +01004857void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
4858 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004859 new (GetGraph()->GetAllocator()) LocationSummary(bool_not, LocationSummary::kNoCall);
David Brazdil66d126e2015-04-03 16:02:44 +01004860 locations->SetInAt(0, Location::RequiresRegister());
4861 locations->SetOut(Location::SameAsFirstInput());
4862}
4863
// Flips a 0/1 boolean with `xor reg, 1` in the shared in/out register.
void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
  LocationSummary* locations = bool_not->GetLocations();
  // The locations builder pinned the output to the first input.
  DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
            locations->Out().AsRegister<CpuRegister>().AsRegister());
  Location out = locations->Out();
  __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
}
4871
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004872void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004873 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004874 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004875 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004876 locations->SetInAt(i, Location::Any());
4877 }
4878 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004879}
4880
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004881void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004882 LOG(FATAL) << "Unimplemented";
4883}
4884
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004885void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004886 /*
Roland Levillain5e8d5f02016-10-18 18:03:43 +01004887 * According to the JSR-133 Cookbook, for x86-64 only StoreLoad/AnyAny barriers need memory fence.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004888 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
Calin Juravle52c48962014-12-16 17:02:57 +00004889 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4890 */
4891 switch (kind) {
4892 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00004893 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004894 break;
4895 }
4896 case MemBarrierKind::kAnyStore:
4897 case MemBarrierKind::kLoadAny:
4898 case MemBarrierKind::kStoreStore: {
4899 // nop
4900 break;
4901 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05004902 case MemBarrierKind::kNTStoreStore:
4903 // Non-Temporal Store/Store needs an explicit fence.
Andreas Gampe3db70682018-12-26 15:12:03 -08004904 MemoryFence(/* non-temporal= */ true);
Mark Mendell7aa04a12016-01-27 22:39:07 -05004905 break;
Calin Juravle52c48962014-12-16 17:02:57 +00004906 }
4907}
4908
// Register constraints for instance/static/predicated field loads. For a
// predicated get, input 0 is the default value (and the output aliases it);
// the receiver moves to input 1. Reference loads under a read barrier may
// call a slow path and must not let the output clobber the object register.
void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
  DCHECK(instruction->IsInstanceFieldGet() ||
         instruction->IsStaticFieldGet() ||
         instruction->IsPredicatedInstanceFieldGet());

  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
  bool is_predicated = instruction->IsPredicatedInstanceFieldGet();
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction,
                                                       object_field_get_with_read_barrier
                                                           ? LocationSummary::kCallOnSlowPath
                                                           : LocationSummary::kNoCall);
  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  // receiver_input
  locations->SetInAt(is_predicated ? 1 : 0, Location::RequiresRegister());
  if (is_predicated) {
    // Input 0 is the default value returned when the predicate fails.
    if (DataType::IsFloatingPointType(instruction->GetType())) {
      locations->SetInAt(0, Location::RequiresFpuRegister());
    } else {
      locations->SetInAt(0, Location::RequiresRegister());
    }
  }
  if (DataType::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(is_predicated ? Location::SameAsFirstInput()
                                    : Location::RequiresFpuRegister());
  } else {
    // The output overlaps for an object field get when read barriers are
    // enabled: we do not want the move to overwrite the object's location, as
    // we need it to emit the read barrier. For predicated instructions we can
    // always overlap since the output is SameAsFirst and the default value.
    locations->SetOut(is_predicated ? Location::SameAsFirstInput() : Location::RequiresRegister(),
                      object_field_get_with_read_barrier || is_predicated
                          ? Location::kOutputOverlap
                          : Location::kNoOutputOverlap);
  }
}
4948
// Emits the load for instance/static/predicated field gets. Reference loads
// route through the read-barrier machinery; all other types use a plain
// memory load. Volatile loads are followed by a LoadAny barrier.
void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() ||
         instruction->IsStaticFieldGet() ||
         instruction->IsPredicatedInstanceFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  // For predicated gets the receiver is input 1 (input 0 is the default value).
  Location base_loc = locations->InAt(instruction->IsPredicatedInstanceFieldGet() ? 1 : 0);
  CpuRegister base = base_loc.AsRegister<CpuRegister>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
  DataType::Type load_type = instruction->GetType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  if (load_type == DataType::Type::kReference) {
    // /* HeapReference<Object> */ out = *(base + offset)
    if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
      // Note that a potential implicit null check is handled in this
      // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, base, offset, /* needs_null_check= */ true);
      if (is_volatile) {
        codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
      }
    } else {
      __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      if (is_volatile) {
        codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
      }
      // If read barriers are enabled, emit read barriers other than
      // Baker's using a slow path (and also unpoison the loaded
      // reference, if heap poisoning is enabled).
      codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
    }
  } else {
    // Non-reference load: no barrier or poisoning concerns.
    codegen_->LoadFromMemoryNoReference(load_type, out, Address(base, offset));
    codegen_->MaybeRecordImplicitNullCheck(instruction);
    if (is_volatile) {
      codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
    }
  }
}
4993
// Register constraints for instance/static field stores: object in input 0,
// value in input 1. Volatile stores restrict the constant forms so the store
// can be emitted as a single instruction. Temps are reserved for the GC-card
// write barrier and/or reference poisoning.
void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
                                            const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  DataType::Type field_type = field_info.GetFieldType();
  bool is_volatile = field_info.IsVolatile();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));

  locations->SetInAt(0, Location::RequiresRegister());
  if (DataType::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
    if (is_volatile) {
      // In order to satisfy the semantics of volatile, this must be a single instruction store.
      locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
    } else {
      locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
    }
  } else {
    if (is_volatile) {
      // In order to satisfy the semantics of volatile, this must be a single instruction store.
      locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
    } else {
      locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
    }
  }
  if (needs_write_barrier) {
    // Temporary registers for the write barrier.
    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());  // Possibly used for reference poisoning too.
  } else if (kPoisonHeapReferences && field_type == DataType::Type::kReference) {
    // Temporary register for the reference poisoning.
    locations->AddTemp(Location::RequiresRegister());
  }
}
5030
Calin Juravle52c48962014-12-16 17:02:57 +00005031void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
Ulya Trafimovich6a4b2992021-06-11 12:02:17 +01005032 uint32_t value_index,
5033 uint32_t extra_temp_index,
5034 DataType::Type field_type,
5035 Address field_addr,
5036 CpuRegister base,
5037 bool is_volatile,
Ulya Trafimovich30bb6af2021-06-15 17:34:51 +01005038 bool is_atomic,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005039 bool value_can_be_null) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005040 LocationSummary* locations = instruction->GetLocations();
Ulya Trafimovich6a4b2992021-06-11 12:02:17 +01005041 Location value = locations->InAt(value_index);
Calin Juravle52c48962014-12-16 17:02:57 +00005042
5043 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005044 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
Calin Juravle52c48962014-12-16 17:02:57 +00005045 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005046
Mark Mendellea5af682015-10-22 17:35:49 -04005047 bool maybe_record_implicit_null_check_done = false;
5048
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005049 switch (field_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005050 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005051 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005052 case DataType::Type::kInt8: {
Mark Mendell40741f32015-04-20 22:10:34 -04005053 if (value.IsConstant()) {
Ulya Trafimovich6a4b2992021-06-11 12:02:17 +01005054 __ movb(field_addr, Immediate(CodeGenerator::GetInt8ValueOf(value.GetConstant())));
Mark Mendell40741f32015-04-20 22:10:34 -04005055 } else {
Ulya Trafimovich6a4b2992021-06-11 12:02:17 +01005056 __ movb(field_addr, value.AsRegister<CpuRegister>());
Mark Mendell40741f32015-04-20 22:10:34 -04005057 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005058 break;
5059 }
5060
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005061 case DataType::Type::kUint16:
5062 case DataType::Type::kInt16: {
Mark Mendell40741f32015-04-20 22:10:34 -04005063 if (value.IsConstant()) {
Ulya Trafimovich6a4b2992021-06-11 12:02:17 +01005064 __ movw(field_addr, Immediate(CodeGenerator::GetInt16ValueOf(value.GetConstant())));
Mark Mendell40741f32015-04-20 22:10:34 -04005065 } else {
Ulya Trafimovich6a4b2992021-06-11 12:02:17 +01005066 __ movw(field_addr, value.AsRegister<CpuRegister>());
Mark Mendell40741f32015-04-20 22:10:34 -04005067 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005068 break;
5069 }
5070
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005071 case DataType::Type::kInt32:
5072 case DataType::Type::kReference: {
Mark Mendell40741f32015-04-20 22:10:34 -04005073 if (value.IsConstant()) {
5074 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005075 // `field_type == DataType::Type::kReference` implies `v == 0`.
5076 DCHECK((field_type != DataType::Type::kReference) || (v == 0));
Roland Levillain4d027112015-07-01 15:41:14 +01005077 // Note: if heap poisoning is enabled, no need to poison
5078 // (negate) `v` if it is a reference, as it would be null.
Ulya Trafimovich6a4b2992021-06-11 12:02:17 +01005079 __ movl(field_addr, Immediate(v));
Mark Mendell40741f32015-04-20 22:10:34 -04005080 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005081 if (kPoisonHeapReferences && field_type == DataType::Type::kReference) {
Ulya Trafimovich6a4b2992021-06-11 12:02:17 +01005082 CpuRegister temp = locations->GetTemp(extra_temp_index).AsRegister<CpuRegister>();
Roland Levillain4d027112015-07-01 15:41:14 +01005083 __ movl(temp, value.AsRegister<CpuRegister>());
5084 __ PoisonHeapReference(temp);
Ulya Trafimovich6a4b2992021-06-11 12:02:17 +01005085 __ movl(field_addr, temp);
Roland Levillain4d027112015-07-01 15:41:14 +01005086 } else {
Ulya Trafimovich6a4b2992021-06-11 12:02:17 +01005087 __ movl(field_addr, value.AsRegister<CpuRegister>());
Roland Levillain4d027112015-07-01 15:41:14 +01005088 }
Mark Mendell40741f32015-04-20 22:10:34 -04005089 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005090 break;
5091 }
5092
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005093 case DataType::Type::kInt64: {
Mark Mendell40741f32015-04-20 22:10:34 -04005094 if (value.IsConstant()) {
5095 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Ulya Trafimovich30bb6af2021-06-15 17:34:51 +01005096 if (is_atomic) {
5097 // Move constant into a register, then atomically store the register to memory.
5098 CpuRegister temp = locations->GetTemp(extra_temp_index).AsRegister<CpuRegister>();
5099 __ movq(temp, Immediate(v));
5100 __ movq(field_addr, temp);
5101 } else {
5102 codegen_->MoveInt64ToAddress(field_addr,
5103 Address::displace(field_addr, sizeof(int32_t)),
5104 v,
5105 instruction);
5106 }
Mark Mendellea5af682015-10-22 17:35:49 -04005107 maybe_record_implicit_null_check_done = true;
Mark Mendell40741f32015-04-20 22:10:34 -04005108 } else {
Ulya Trafimovich6a4b2992021-06-11 12:02:17 +01005109 __ movq(field_addr, value.AsRegister<CpuRegister>());
Mark Mendell40741f32015-04-20 22:10:34 -04005110 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005111 break;
5112 }
5113
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005114 case DataType::Type::kFloat32: {
Mark Mendellea5af682015-10-22 17:35:49 -04005115 if (value.IsConstant()) {
Ulya Trafimovich6a4b2992021-06-11 12:02:17 +01005116 int32_t v = bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
5117 __ movl(field_addr, Immediate(v));
Mark Mendellea5af682015-10-22 17:35:49 -04005118 } else {
Ulya Trafimovich6a4b2992021-06-11 12:02:17 +01005119 __ movss(field_addr, value.AsFpuRegister<XmmRegister>());
Mark Mendellea5af682015-10-22 17:35:49 -04005120 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00005121 break;
5122 }
5123
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005124 case DataType::Type::kFloat64: {
Mark Mendellea5af682015-10-22 17:35:49 -04005125 if (value.IsConstant()) {
Ulya Trafimovich6a4b2992021-06-11 12:02:17 +01005126 int64_t v = bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
Ulya Trafimovich30bb6af2021-06-15 17:34:51 +01005127 if (is_atomic) {
5128 // Move constant into a register, then atomically store the register to memory.
5129 CpuRegister temp = locations->GetTemp(extra_temp_index).AsRegister<CpuRegister>();
5130 __ movq(temp, Immediate(v));
5131 __ movq(field_addr, temp);
5132 } else {
5133 codegen_->MoveInt64ToAddress(field_addr,
5134 Address::displace(field_addr, sizeof(int32_t)),
5135 v,
5136 instruction);
5137 }
Mark Mendellea5af682015-10-22 17:35:49 -04005138 maybe_record_implicit_null_check_done = true;
5139 } else {
Ulya Trafimovich6a4b2992021-06-11 12:02:17 +01005140 __ movsd(field_addr, value.AsFpuRegister<XmmRegister>());
Mark Mendellea5af682015-10-22 17:35:49 -04005141 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00005142 break;
5143 }
5144
Aart Bik66c158e2018-01-31 12:55:04 -08005145 case DataType::Type::kUint32:
5146 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005147 case DataType::Type::kVoid:
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005148 LOG(FATAL) << "Unreachable type " << field_type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07005149 UNREACHABLE();
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005150 }
Calin Juravle52c48962014-12-16 17:02:57 +00005151
Mark Mendellea5af682015-10-22 17:35:49 -04005152 if (!maybe_record_implicit_null_check_done) {
5153 codegen_->MaybeRecordImplicitNullCheck(instruction);
5154 }
Calin Juravle77520bc2015-01-12 18:45:46 +00005155
Ulya Trafimovich6a4b2992021-06-11 12:02:17 +01005156 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(value_index))) {
Calin Juravle77520bc2015-01-12 18:45:46 +00005157 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
Ulya Trafimovich6a4b2992021-06-11 12:02:17 +01005158 CpuRegister card = locations->GetTemp(extra_temp_index).AsRegister<CpuRegister>();
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005159 codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
Calin Juravle77520bc2015-01-12 18:45:46 +00005160 }
5161
Calin Juravle52c48962014-12-16 17:02:57 +00005162 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005163 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
Calin Juravle52c48962014-12-16 17:02:57 +00005164 }
Ulya Trafimovich6a4b2992021-06-11 12:02:17 +01005165}
5166
5167void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
5168 const FieldInfo& field_info,
5169 bool value_can_be_null) {
5170 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
5171
5172 LocationSummary* locations = instruction->GetLocations();
5173 CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
5174 bool is_volatile = field_info.IsVolatile();
5175 DataType::Type field_type = field_info.GetFieldType();
5176 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
5177 bool is_predicated =
5178 instruction->IsInstanceFieldSet() && instruction->AsInstanceFieldSet()->GetIsPredicatedSet();
5179
5180 NearLabel pred_is_null;
5181 if (is_predicated) {
5182 __ testl(base, base);
5183 __ j(kZero, &pred_is_null);
5184 }
5185
5186 HandleFieldSet(instruction,
5187 /*value_index=*/ 1,
5188 /*extra_temp_index=*/ 1,
5189 field_type,
5190 Address(base, offset),
5191 base,
5192 is_volatile,
Ulya Trafimovich30bb6af2021-06-15 17:34:51 +01005193 /*is_atomic=*/ false,
Ulya Trafimovich6a4b2992021-06-11 12:02:17 +01005194 value_can_be_null);
Alex Light3a73ffb2021-01-25 14:11:05 +00005195
5196 if (is_predicated) {
5197 __ Bind(&pred_is_null);
5198 }
Calin Juravle52c48962014-12-16 17:02:57 +00005199}
5200
5201void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
5202 HandleFieldSet(instruction, instruction->GetFieldInfo());
5203}
5204
5205void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005206 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005207}
5208
// Location setup for a predicated instance field get; uses the common
// field-get location logic.
void LocationsBuilderX86_64::VisitPredicatedInstanceFieldGet(
    HPredicatedInstanceFieldGet* instruction) {
  HandleFieldGet(instruction);
}
5213
// Location setup for an instance field load; uses the common field-get
// location logic.
void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction);
}
5217
Alex Light3a73ffb2021-01-25 14:11:05 +00005218void InstructionCodeGeneratorX86_64::VisitPredicatedInstanceFieldGet(
5219 HPredicatedInstanceFieldGet* instruction) {
5220 NearLabel finish;
5221 LocationSummary* locations = instruction->GetLocations();
5222 CpuRegister target = locations->InAt(1).AsRegister<CpuRegister>();
5223 __ testl(target, target);
5224 __ j(kZero, &finish);
5225 HandleFieldGet(instruction, instruction->GetFieldInfo());
5226 __ Bind(&finish);
5227}
5228
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005229void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00005230 HandleFieldGet(instruction, instruction->GetFieldInfo());
5231}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005232
// Location setup for a static field load; uses the common field-get
// location logic.
void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction);
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005236
Calin Juravle52c48962014-12-16 17:02:57 +00005237void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
5238 HandleFieldGet(instruction, instruction->GetFieldInfo());
5239}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005240
Calin Juravle52c48962014-12-16 17:02:57 +00005241void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
5242 HandleFieldSet(instruction, instruction->GetFieldInfo());
5243}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005244
Calin Juravle52c48962014-12-16 17:02:57 +00005245void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005246 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005247}
5248
Vladimir Marko552a1342017-10-31 10:56:47 +00005249void LocationsBuilderX86_64::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
5250 codegen_->CreateStringBuilderAppendLocations(instruction, Location::RegisterLocation(RAX));
5251}
5252
5253void InstructionCodeGeneratorX86_64::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
5254 __ movl(CpuRegister(RDI), Immediate(instruction->GetFormat()->GetValue()));
5255 codegen_->InvokeRuntime(kQuickStringBuilderAppend, instruction, instruction->GetDexPc());
5256}
5257
Calin Juravlee460d1d2015-09-29 04:52:17 +01005258void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
5259 HUnresolvedInstanceFieldGet* instruction) {
5260 FieldAccessCallingConventionX86_64 calling_convention;
5261 codegen_->CreateUnresolvedFieldLocationSummary(
5262 instruction, instruction->GetFieldType(), calling_convention);
5263}
5264
5265void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
5266 HUnresolvedInstanceFieldGet* instruction) {
5267 FieldAccessCallingConventionX86_64 calling_convention;
5268 codegen_->GenerateUnresolvedFieldAccess(instruction,
5269 instruction->GetFieldType(),
5270 instruction->GetFieldIndex(),
5271 instruction->GetDexPc(),
5272 calling_convention);
5273}
5274
5275void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
5276 HUnresolvedInstanceFieldSet* instruction) {
5277 FieldAccessCallingConventionX86_64 calling_convention;
5278 codegen_->CreateUnresolvedFieldLocationSummary(
5279 instruction, instruction->GetFieldType(), calling_convention);
5280}
5281
5282void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
5283 HUnresolvedInstanceFieldSet* instruction) {
5284 FieldAccessCallingConventionX86_64 calling_convention;
5285 codegen_->GenerateUnresolvedFieldAccess(instruction,
5286 instruction->GetFieldType(),
5287 instruction->GetFieldIndex(),
5288 instruction->GetDexPc(),
5289 calling_convention);
5290}
5291
5292void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
5293 HUnresolvedStaticFieldGet* instruction) {
5294 FieldAccessCallingConventionX86_64 calling_convention;
5295 codegen_->CreateUnresolvedFieldLocationSummary(
5296 instruction, instruction->GetFieldType(), calling_convention);
5297}
5298
5299void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
5300 HUnresolvedStaticFieldGet* instruction) {
5301 FieldAccessCallingConventionX86_64 calling_convention;
5302 codegen_->GenerateUnresolvedFieldAccess(instruction,
5303 instruction->GetFieldType(),
5304 instruction->GetFieldIndex(),
5305 instruction->GetDexPc(),
5306 calling_convention);
5307}
5308
5309void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
5310 HUnresolvedStaticFieldSet* instruction) {
5311 FieldAccessCallingConventionX86_64 calling_convention;
5312 codegen_->CreateUnresolvedFieldLocationSummary(
5313 instruction, instruction->GetFieldType(), calling_convention);
5314}
5315
5316void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
5317 HUnresolvedStaticFieldSet* instruction) {
5318 FieldAccessCallingConventionX86_64 calling_convention;
5319 codegen_->GenerateUnresolvedFieldAccess(instruction,
5320 instruction->GetFieldType(),
5321 instruction->GetFieldIndex(),
5322 instruction->GetDexPc(),
5323 calling_convention);
5324}
5325
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005326void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005327 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
5328 Location loc = codegen_->GetCompilerOptions().GetImplicitNullChecks()
5329 ? Location::RequiresRegister()
5330 : Location::Any();
5331 locations->SetInAt(0, loc);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005332}
5333
// Emits an implicit null check: a load from the object at offset 0, which
// faults if the object is null.  The pc recorded right after the load lets the
// runtime attribute such a fault to this HNullCheck.
void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
  if (CanMoveNullCheckToUser(instruction)) {
    // A later instruction dereferencing the same object will serve as the
    // implicit check; nothing to emit here.
    return;
  }
  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  // TEST with a memory operand reads the first word of the object but only
  // sets flags, so no value-carrying register is clobbered.
  __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
  RecordPcInfo(instruction, instruction->GetDexPc());
}
5344
// Emits an explicit null check: compares the object against null and jumps to
// a slow path when it is null.
void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCode* slow_path = new (GetScopedAllocator()) NullCheckSlowPathX86_64(instruction);
  AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  if (obj.IsRegister()) {
    __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
  } else if (obj.IsStackSlot()) {
    __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
  } else {
    // A constant input must be the null constant (DCHECKed below), so the
    // check unconditionally takes the slow path.
    DCHECK(obj.IsConstant()) << obj;
    DCHECK(obj.GetConstant()->IsNullConstant());
    __ jmp(slow_path->GetEntryLabel());
    return;
  }
  __ j(kEqual, slow_path->GetEntryLabel());
}
5364
// Delegates to the code generator, which selects between the implicit and
// explicit null-check variants defined above.
void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
  codegen_->GenerateNullCheck(instruction);
}
5368
// Location setup for an array element load.  An object-array get with read
// barriers may need to call into a slow path.
void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction,
                                                       object_array_get_with_read_barrier
                                                           ? LocationSummary::kCallOnSlowPath
                                                           : LocationSummary::kNoCall);
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (DataType::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps for an object array get when read barriers
    // are enabled: we do not want the move to overwrite the array's
    // location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}
5393
// Generates the load of an array element.  Reference loads may need a read
// barrier (fast Baker path or slow path); char loads for String.charAt() must
// handle the compressed-string representation.
void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location index = locations->InAt(1);
  Location out_loc = locations->Out();
  uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);

  DataType::Type type = instruction->GetType();
  if (type == DataType::Type::kReference) {
    static_assert(
        sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
        "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
    // /* HeapReference<Object> */ out =
    //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
    if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
      // Note that a potential implicit null check is handled in this
      // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
      codegen_->GenerateArrayLoadWithBakerReadBarrier(
          instruction, out_loc, obj, data_offset, index, /* needs_null_check= */ true);
    } else {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      // If read barriers are enabled, emit read barriers other than
      // Baker's using a slow path (and also unpoison the loaded
      // reference, if heap poisoning is enabled).
      if (index.IsConstant()) {
        uint32_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
      } else {
        codegen_->MaybeGenerateReadBarrierSlow(
            instruction, out_loc, out_loc, obj_loc, data_offset, index);
      }
    }
  } else {
    if (type == DataType::Type::kUint16
        && mirror::kUseStringCompression
        && instruction->IsStringCharAt()) {
      // Branch cases into compressed and uncompressed for each index's type.
      // The low bit of the String count field distinguishes the two layouts.
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
      NearLabel done, not_compressed;
      __ testb(Address(obj, count_offset), Immediate(1));
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                    "Expecting 0=compressed, 1=uncompressed");
      __ j(kNotZero, &not_compressed);
      // Compressed: one byte per character, zero-extended to the char range.
      __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
      __ jmp(&done);
      __ Bind(&not_compressed);
      __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
      __ Bind(&done);
    } else {
      ScaleFactor scale = CodeGenerator::ScaleFactorForType(type);
      Address src = CodeGeneratorX86_64::ArrayAddress(obj, index, scale, data_offset);
      codegen_->LoadFromMemoryNoReference(type, out_loc, src);
    }
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
}
5456
// Location setup for an array element store.  Stores that may need a type
// check get a slow-path call; stores needing a write barrier get two
// temporaries (also reused for heap-reference poisoning).
void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
  DataType::Type value_type = instruction->GetComponentType();

  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  bool needs_type_check = instruction->NeedsTypeCheck();

  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction,
      needs_type_check ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall);

  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (DataType::IsFloatingPointType(value_type)) {
    locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
  } else {
    locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
  }

  if (needs_write_barrier) {
    // Temporary registers for the write barrier.
    locations->AddTemp(Location::RequiresRegister());  // Possibly used for ref. poisoning too.
    locations->AddTemp(Location::RequiresRegister());
  }
}
5482
// Generates the store of an array element.  Primitive stores are a single
// move; reference stores additionally need a GC card mark, possible heap
// reference poisoning, and possibly a type check with a slow-path fallback.
void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location array_loc = locations->InAt(0);
  CpuRegister array = array_loc.AsRegister<CpuRegister>();
  Location index = locations->InAt(1);
  Location value = locations->InAt(2);
  DataType::Type value_type = instruction->GetComponentType();
  bool needs_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  switch (value_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_1, offset);
      if (value.IsRegister()) {
        __ movb(address, value.AsRegister<CpuRegister>());
      } else {
        __ movb(address, Immediate(CodeGenerator::GetInt8ValueOf(value.GetConstant())));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kUint16:
    case DataType::Type::kInt16: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_2, offset);
      if (value.IsRegister()) {
        __ movw(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        __ movw(address, Immediate(CodeGenerator::GetInt16ValueOf(value.GetConstant())));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kReference: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);

      if (!value.IsRegister()) {
        // Just setting null.  Storing null needs neither a write barrier nor
        // a type check.
        DCHECK(instruction->InputAt(2)->IsNullConstant());
        DCHECK(value.IsConstant()) << value;
        __ movl(address, Immediate(0));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        DCHECK(!needs_write_barrier);
        DCHECK(!needs_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      CpuRegister register_value = value.AsRegister<CpuRegister>();
      Location temp_loc = locations->GetTemp(0);
      CpuRegister temp = temp_loc.AsRegister<CpuRegister>();

      // A null value skips the type check and card mark and goes straight to
      // the store.
      bool can_value_be_null = instruction->GetValueCanBeNull();
      NearLabel do_store;
      if (can_value_be_null) {
        __ testl(register_value, register_value);
        __ j(kEqual, &do_store);
      }

      SlowPathCode* slow_path = nullptr;
      if (needs_type_check) {
        slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathX86_64(instruction);
        codegen_->AddSlowPath(slow_path);

        const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
        const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
        const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();

        // Note that when Baker read barriers are enabled, the type
        // checks are performed without read barriers.  This is fine,
        // even in the case where a class object is in the from-space
        // after the flip, as a comparison involving such a type would
        // not produce a false positive; it may of course produce a
        // false negative, in which case we would take the ArraySet
        // slow path.

        // /* HeapReference<Class> */ temp = array->klass_
        __ movl(temp, Address(array, class_offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ MaybeUnpoisonHeapReference(temp);

        // /* HeapReference<Class> */ temp = temp->component_type_
        __ movl(temp, Address(temp, component_offset));
        // If heap poisoning is enabled, no need to unpoison `temp`
        // nor the object reference in `register_value->klass`, as
        // we are comparing two poisoned references.
        __ cmpl(temp, Address(register_value, class_offset));

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          // The component type may be a (proper) superclass of the value's
          // class; accept the store if that superclass is Object.
          NearLabel do_put;
          __ j(kEqual, &do_put);
          // If heap poisoning is enabled, the `temp` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp);

          // If heap poisoning is enabled, no need to unpoison the
          // heap reference loaded below, as it is only used for a
          // comparison with null.
          __ cmpl(Address(temp, super_offset), Immediate(0));
          __ j(kNotEqual, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ j(kNotEqual, slow_path->GetEntryLabel());
        }
      }

      // Mark the GC card for the array; the value is known non-null here
      // (a null value branched over this code).
      CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
      codegen_->MarkGCCard(
          temp, card, array, value.AsRegister<CpuRegister>(), /* value_can_be_null= */ false);

      if (can_value_be_null) {
        DCHECK(do_store.IsLinked());
        __ Bind(&do_store);
      }

      Location source = value;
      if (kPoisonHeapReferences) {
        __ movl(temp, register_value);
        __ PoisonHeapReference(temp);
        source = temp_loc;
      }

      __ movl(address, source.AsRegister<CpuRegister>());

      if (can_value_be_null || !needs_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }

      break;
    }

    case DataType::Type::kInt32: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsRegister()) {
        __ movl(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kInt64: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsRegister()) {
        __ movq(address, value.AsRegister<CpuRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        // A 64-bit immediate may not fit a single mov; the helper splits it
        // into two 32-bit stores when necessary.
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case DataType::Type::kFloat32: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsFpuRegister()) {
        __ movss(address, value.AsFpuRegister<XmmRegister>());
      } else {
        // Store the float constant through its bit pattern.
        DCHECK(value.IsConstant());
        int32_t v = bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kFloat64: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsFpuRegister()) {
        __ movsd(address, value.AsFpuRegister<XmmRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        // Store the double constant through its bit pattern; the helper may
        // split it into two 32-bit stores.
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
5692
5693void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005694 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005695 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005696 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04005697 if (!instruction->IsEmittedAtUseSite()) {
5698 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5699 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005700}
5701
5702void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
Mark Mendellee8d9712016-07-12 11:13:15 -04005703 if (instruction->IsEmittedAtUseSite()) {
5704 return;
5705 }
5706
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005707 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01005708 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00005709 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
5710 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005711 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005712 codegen_->MaybeRecordImplicitNullCheck(instruction);
jessicahandojo4877b792016-09-08 19:49:13 -07005713 // Mask out most significant bit in case the array is String's array of char.
5714 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01005715 __ shrl(out, Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07005716 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005717}
5718
5719void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005720 RegisterSet caller_saves = RegisterSet::Empty();
5721 InvokeRuntimeCallingConvention calling_convention;
5722 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5723 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
5724 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Mark Mendellf60c90b2015-03-04 15:12:59 -05005725 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendellee8d9712016-07-12 11:13:15 -04005726 HInstruction* length = instruction->InputAt(1);
5727 if (!length->IsEmittedAtUseSite()) {
5728 locations->SetInAt(1, Location::RegisterOrConstant(length));
5729 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005730}
5731
// Emits the array bounds check: jumps to a throwing slow path when the index
// is not within [0, length). Handles constant and register forms of both
// operands, plus the case where the length is an ArrayLength folded into this
// check and must be read straight from the array object.
void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location index_loc = locations->InAt(0);
  Location length_loc = locations->InAt(1);
  SlowPathCode* slow_path =
      new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathX86_64(instruction);

  if (length_loc.IsConstant()) {
    int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
    if (index_loc.IsConstant()) {
      // BCE will remove the bounds check if we are guaranteed to pass.
      int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
      if (index < 0 || index >= length) {
        // Statically known to fail: unconditionally take the slow path.
        codegen_->AddSlowPath(slow_path);
        __ jmp(slow_path->GetEntryLabel());
      } else {
        // Some optimization after BCE may have generated this, and we should not
        // generate a bounds check if it is a valid range.
      }
      return;
    }

    // We have to reverse the jump condition because the length is the constant.
    // The unsigned "above or equal" comparison also catches negative indices.
    CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
    __ cmpl(index_reg, Immediate(length));
    codegen_->AddSlowPath(slow_path);
    __ j(kAboveEqual, slow_path->GetEntryLabel());
  } else {
    HInstruction* array_length = instruction->InputAt(1);
    if (array_length->IsEmittedAtUseSite()) {
      // Address the length field in the array.
      DCHECK(array_length->IsArrayLength());
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
        // TODO: if index_loc.IsConstant(), compare twice the index (to compensate for
        // the string compression flag) with the in-memory length and avoid the temporary.
        CpuRegister length_reg = CpuRegister(TMP);
        __ movl(length_reg, array_len);
        codegen_->MaybeRecordImplicitNullCheck(array_length);
        // Shift out the compression flag stored in the low bit of the count.
        __ shrl(length_reg, Immediate(1));
        codegen_->GenerateIntCompare(length_reg, index_loc);
      } else {
        // Checking the bound for general case:
        // Array of char or String's array when the compression feature off.
        if (index_loc.IsConstant()) {
          int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
          __ cmpl(array_len, Immediate(value));
        } else {
          __ cmpl(array_len, index_loc.AsRegister<CpuRegister>());
        }
        // The in-memory comparison above may fault on a null array; record it
        // as the implicit null check.
        codegen_->MaybeRecordImplicitNullCheck(array_length);
      }
    } else {
      codegen_->GenerateIntCompare(length_loc, index_loc);
    }
    codegen_->AddSlowPath(slow_path);
    // Comparison was length vs. index, so (unsigned) length <= index fails.
    __ j(kBelowEqual, slow_path->GetEntryLabel());
  }
}
5793
// Marks the GC card table card covering `object`, as required by the write
// barrier after storing the reference `value` into `object`. When
// `value_can_be_null` is true, the marking is skipped entirely for a null
// `value`. `temp` and `card` are clobbered as scratch registers.
void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
                                     CpuRegister card,
                                     CpuRegister object,
                                     CpuRegister value,
                                     bool value_can_be_null) {
  NearLabel is_null;
  if (value_can_be_null) {
    __ testl(value, value);
    __ j(kEqual, &is_null);
  }
  // Load the address of the card table into `card`.
  __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64PointerSize>().Int32Value(),
                                        /* no_rip= */ true));
  // Calculate the offset (in the card table) of the card corresponding to
  // `object`.
  __ movq(temp, object);
  __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
  // Write the `art::gc::accounting::CardTable::kCardDirty` value into the
  // `object`'s card.
  //
  // Register `card` contains the address of the card table. Note that the card
  // table's base is biased during its creation so that it always starts at an
  // address whose least-significant byte is equal to `kCardDirty` (see
  // art::gc::accounting::CardTable::Create). Therefore the MOVB instruction
  // below writes the `kCardDirty` (byte) value into the `object`'s card
  // (located at `card + object >> kCardShift`).
  //
  // This dual use of the value in register `card` (1. to calculate the location
  // of the card to mark; and 2. to load the `kCardDirty` value) saves a load
  // (no need to explicitly load `kCardDirty` as an immediate value).
  __ movb(Address(temp, card, TIMES_1, 0), card);
  if (value_can_be_null) {
    __ Bind(&is_null);
  }
}
5829
// Parallel moves carry their operands directly (see
// InstructionCodeGeneratorX86_64::VisitParallelMove, which hands them to the
// move resolver), so there are no locations to build; reaching this visitor
// indicates a compiler bug.
void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented";
}
5833
5834void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
Vladimir Markobea75ff2017-10-11 20:39:54 +01005835 if (instruction->GetNext()->IsSuspendCheck() &&
5836 instruction->GetBlock()->GetLoopInformation() != nullptr) {
5837 HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
5838 // The back edge will generate the suspend check.
5839 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
5840 }
5841
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005842 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5843}
5844
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005845void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005846 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5847 instruction, LocationSummary::kCallOnSlowPath);
Aart Bikb13c65b2017-03-21 20:14:07 -07005848 // In suspend check slow path, usually there are no caller-save registers at all.
5849 // If SIMD instructions are present, however, we force spilling all live SIMD
5850 // registers in full width (since the runtime only saves/restores lower part).
Aart Bik5576f372017-03-23 16:17:37 -07005851 locations->SetCustomSlowPathCallerSaves(
5852 GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005853}
5854
5855void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005856 HBasicBlock* block = instruction->GetBlock();
5857 if (block->GetLoopInformation() != nullptr) {
5858 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5859 // The back edge will generate the suspend check.
5860 return;
5861 }
5862 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5863 // The goto will generate the suspend check.
5864 return;
5865 }
5866 GenerateSuspendCheck(instruction, nullptr);
5867}
5868
// Emits a suspend check: tests the current thread's flags word and diverts to
// a slow path when it is non-zero. With a null `successor` the slow path
// returns to the instruction following the check; otherwise (back-edge case)
// the fast path jumps straight to `successor` and the slow path is entered by
// fall-through.
void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  // Reuse a slow path already attached to this instruction, if any; several
  // code paths may reach the same suspend check.
  SuspendCheckSlowPathX86_64* slow_path =
      down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    slow_path =
        new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathX86_64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
    }
  } else {
    // A cached slow path must agree on where to resume.
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  // Compare the thread-flags word (addressed via the GS-based Thread pointer)
  // against zero; any set flag means the runtime must be entered.
  __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64PointerSize>().Int32Value(),
                                  /* no_rip= */ true),
                Immediate(0));
  if (successor == nullptr) {
    __ j(kNotEqual, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ j(kEqual, codegen_->GetLabelOf(successor));
    __ jmp(slow_path->GetEntryLabel());
  }
}
5896
// The move resolver emits through the owning code generator's assembler.
X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
  return codegen_->GetAssembler();
}
5900
// Emits the move at `index` in the resolved parallel-move list. Dispatches on
// the (source, destination) location kinds: GP register, FP register, 32-bit
// stack slot, 64-bit stack slot, 128-bit SIMD stack slot, or constant.
// Memory-to-memory moves go through the reserved TMP scratch register.
void ParallelMoveResolverX86_64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    // GP register to register / 32-bit slot / 64-bit slot.
    if (destination.IsRegister()) {
      __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
    } else if (destination.IsStackSlot()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else {
      DCHECK(destination.IsDoubleStackSlot());
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    }
  } else if (source.IsStackSlot()) {
    // 32-bit stack slot to GP register, FP register, or another slot (via TMP).
    if (destination.IsRegister()) {
      __ movl(destination.AsRegister<CpuRegister>(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (destination.IsFpuRegister()) {
      __ movss(destination.AsFpuRegister<XmmRegister>(),
               Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(destination.IsStackSlot());
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (source.IsDoubleStackSlot()) {
    // 64-bit stack slot to GP register, FP register, or another slot (via TMP).
    if (destination.IsRegister()) {
      __ movq(destination.AsRegister<CpuRegister>(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (destination.IsFpuRegister()) {
      __ movsd(destination.AsFpuRegister<XmmRegister>(),
               Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (source.IsSIMDStackSlot()) {
    // 128-bit SIMD slot to FP register, or slot-to-slot as two 64-bit copies.
    if (destination.IsFpuRegister()) {
      __ movups(destination.AsFpuRegister<XmmRegister>(),
                Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(destination.IsSIMDStackSlot());
      size_t high = kX86_64WordSize;
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex() + high));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex() + high), CpuRegister(TMP));
    }
  } else if (source.IsConstant()) {
    // Materialize a constant, by kind (int/null, long, float, double).
    HConstant* constant = source.GetConstant();
    if (constant->IsIntConstant() || constant->IsNullConstant()) {
      int32_t value = CodeGenerator::GetInt32ValueOf(constant);
      if (destination.IsRegister()) {
        if (value == 0) {
          // xor is a shorter encoding for zeroing a register.
          __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
        } else {
          __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
        }
      } else {
        DCHECK(destination.IsStackSlot()) << destination;
        __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
      }
    } else if (constant->IsLongConstant()) {
      int64_t value = constant->AsLongConstant()->GetValue();
      if (destination.IsRegister()) {
        codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        codegen_->Store64BitValueToStack(destination, value);
      }
    } else if (constant->IsFloatConstant()) {
      float fp_value = constant->AsFloatConstant()->GetValue();
      if (destination.IsFpuRegister()) {
        XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
        codegen_->Load32BitValue(dest, fp_value);
      } else {
        DCHECK(destination.IsStackSlot()) << destination;
        // Store the raw bit pattern of the float as a 32-bit immediate.
        Immediate imm(bit_cast<int32_t, float>(fp_value));
        __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
      }
    } else {
      DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
      double fp_value = constant->AsDoubleConstant()->GetValue();
      int64_t value = bit_cast<int64_t, double>(fp_value);
      if (destination.IsFpuRegister()) {
        XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
        codegen_->Load64BitValue(dest, fp_value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        codegen_->Store64BitValueToStack(destination, value);
      }
    }
  } else if (source.IsFpuRegister()) {
    // FP register to FP register or to a 32/64/128-bit stack slot.
    if (destination.IsFpuRegister()) {
      __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
    } else if (destination.IsStackSlot()) {
      __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else if (destination.IsDoubleStackSlot()) {
      __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else {
      DCHECK(destination.IsSIMDStackSlot());
      __ movups(Address(CpuRegister(RSP), destination.GetStackIndex()),
                source.AsFpuRegister<XmmRegister>());
    }
  }
}
6013
// Swaps the 32-bit value in `reg` with the 32-bit stack slot at RSP + `mem`,
// using TMP as scratch.
void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movl(Address(CpuRegister(RSP), mem), reg);
  __ movl(reg, CpuRegister(TMP));
}
6019
// Swaps two 64-bit general-purpose registers through TMP.
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
  __ movq(CpuRegister(TMP), reg1);
  __ movq(reg1, reg2);
  __ movq(reg2, CpuRegister(TMP));
}
6025
// Swaps the 64-bit value in `reg` with the 64-bit stack slot at RSP + `mem`,
// using TMP as scratch.
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movq(Address(CpuRegister(RSP), mem), reg);
  __ movq(reg, CpuRegister(TMP));
}
6031
// Swaps the low 32 bits of XMM register `reg` with the 32-bit stack slot at
// RSP + `mem`, staging the old slot value in TMP.
void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movss(Address(CpuRegister(RSP), mem), reg);
  __ movd(reg, CpuRegister(TMP));
}
6037
// Swaps the low 64 bits of XMM register `reg` with the 64-bit stack slot at
// RSP + `mem`, staging the old slot value in TMP.
void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movsd(Address(CpuRegister(RSP), mem), reg);
  __ movd(reg, CpuRegister(TMP));
}
6043
// Swaps a full 128-bit XMM register with the 16-byte stack slot at
// RSP + `mem`. The register is parked in freshly reserved stack space, the
// two slots are swapped qword-by-qword, and the register is reloaded. Note
// the `mem + extra_slot` adjustment: `mem` was relative to the original RSP,
// which the subq moved down.
void ParallelMoveResolverX86_64::Exchange128(XmmRegister reg, int mem) {
  size_t extra_slot = 2 * kX86_64WordSize;
  __ subq(CpuRegister(RSP), Immediate(extra_slot));
  __ movups(Address(CpuRegister(RSP), 0), XmmRegister(reg));
  ExchangeMemory64(0, mem + extra_slot, 2);
  __ movups(XmmRegister(reg), Address(CpuRegister(RSP), 0));
  __ addq(CpuRegister(RSP), Immediate(extra_slot));
}
6052
// Swaps two 32-bit stack slots using TMP plus one allocated scratch register.
// If the scratch register had to be spilled (pushed), RSP moved by one word,
// so both slot offsets are compensated via `stack_offset`.
void ParallelMoveResolverX86_64::ExchangeMemory32(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
  __ movl(CpuRegister(ensure_scratch.GetRegister()),
          Address(CpuRegister(RSP), mem2 + stack_offset));
  __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
  __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
          CpuRegister(ensure_scratch.GetRegister()));
}
6065
// Swaps `num_of_qwords` consecutive 64-bit qwords between two stack regions
// starting at RSP + `mem1` and RSP + `mem2`, using TMP plus one allocated
// scratch register. As in ExchangeMemory32, `stack_offset` compensates for a
// possible scratch-register spill having moved RSP.
void ParallelMoveResolverX86_64::ExchangeMemory64(int mem1, int mem2, int num_of_qwords) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;

  // Now that temp registers are available (possibly spilled), exchange blocks of memory.
  for (int i = 0; i < num_of_qwords; i++) {
    __ movq(CpuRegister(TMP),
            Address(CpuRegister(RSP), mem1 + stack_offset));
    __ movq(CpuRegister(ensure_scratch.GetRegister()),
            Address(CpuRegister(RSP), mem2 + stack_offset));
    __ movq(Address(CpuRegister(RSP), mem2 + stack_offset),
            CpuRegister(TMP));
    __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
            CpuRegister(ensure_scratch.GetRegister()));
    stack_offset += kX86_64WordSize;
  }
}
6085
// Emits the swap at `index` in the resolved parallel-move list, dispatching
// to the width-appropriate Exchange helper based on the two location kinds
// (GP/FP register, 32/64-bit stack slot, 128-bit SIMD stack slot).
void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    ExchangeMemory32(destination.GetStackIndex(), source.GetStackIndex());
  } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
    Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
    ExchangeMemory64(destination.GetStackIndex(), source.GetStackIndex(), 1);
  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
    // FP <-> FP: stage one value in TMP (via movd) so no XMM scratch is needed.
    __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
    __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
    __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
  } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
    Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
    Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else if (source.IsSIMDStackSlot() && destination.IsSIMDStackSlot()) {
    ExchangeMemory64(destination.GetStackIndex(), source.GetStackIndex(), 2);
  } else if (source.IsFpuRegister() && destination.IsSIMDStackSlot()) {
    Exchange128(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (destination.IsFpuRegister() && source.IsSIMDStackSlot()) {
    Exchange128(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else {
    LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
  }
}
6127
6128
// Saves a scratch register on the stack so ScratchRegisterScope may reuse it.
void ParallelMoveResolverX86_64::SpillScratch(int reg) {
  __ pushq(CpuRegister(reg));
}
6132
6133
// Restores a scratch register previously saved by SpillScratch.
void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
  __ popq(CpuRegister(reg));
}
6137
// Emits a class initialization check: branches to `slow_path` unless the
// class in `class_reg` is visibly initialized. The ClassStatus lives in the
// bits above the SubtypeCheckBits bitstring, so the comparison reads the
// single byte containing the status and compares it against the
// kVisiblyInitialized value shifted into that byte; anything below means
// initialization is not (visibly) complete yet.
void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
    SlowPathCode* slow_path, CpuRegister class_reg) {
  constexpr size_t status_lsb_position = SubtypeCheckBits::BitStructSizeOf();
  const size_t status_byte_offset =
      mirror::Class::StatusOffset().SizeValue() + (status_lsb_position / kBitsPerByte);
  constexpr uint32_t shifted_visibly_initialized_value =
      enum_cast<uint32_t>(ClassStatus::kVisiblyInitialized) << (status_lsb_position % kBitsPerByte);

  __ cmpb(Address(class_reg, status_byte_offset), Immediate(shifted_visibly_initialized_value));
  __ j(kBelow, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
6150
// Emits the bitstring type-check comparison against the class in `temp`,
// leaving the flags such that a subsequent `equal`/`zero` branch is taken iff
// the class's bitstring matches `path_to_root` under `mask`. For a 16-bit
// mask the comparison is done directly against memory; otherwise the status
// word is loaded and the masked bits are tested via SUB + left shift
// (clobbering `temp`).
void InstructionCodeGeneratorX86_64::GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check,
                                                                       CpuRegister temp) {
  uint32_t path_to_root = check->GetBitstringPathToRoot();
  uint32_t mask = check->GetBitstringMask();
  DCHECK(IsPowerOfTwo(mask + 1));
  size_t mask_bits = WhichPowerOf2(mask + 1);

  if (mask_bits == 16u) {
    // Compare the bitstring in memory.
    __ cmpw(Address(temp, mirror::Class::StatusOffset()), Immediate(path_to_root));
  } else {
    // /* uint32_t */ temp = temp->status_
    __ movl(temp, Address(temp, mirror::Class::StatusOffset()));
    // Compare the bitstring bits using SUB.
    __ subl(temp, Immediate(path_to_root));
    // Shift out bits that do not contribute to the comparison.
    __ shll(temp, Immediate(32u - mask_bits));
  }
}
6170
// x86-64 supports every HLoadClass load kind, so the desired kind is returned
// unchanged; the switch only validates (in debug builds) that PC-relative /
// .bss kinds are used by AOT compilation and the JIT-only kinds by the JIT.
HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
    HLoadClass::LoadKind desired_class_load_kind) {
  switch (desired_class_load_kind) {
    case HLoadClass::LoadKind::kInvalid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
    case HLoadClass::LoadKind::kReferrersClass:
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadClass::LoadKind::kBootImageRelRo:
    case HLoadClass::LoadKind::kBssEntry:
    case HLoadClass::LoadKind::kBssEntryPublic:
    case HLoadClass::LoadKind::kBssEntryPackage:
      DCHECK(!GetCompilerOptions().IsJitCompiler());
      break;
    case HLoadClass::LoadKind::kJitBootImageAddress:
    case HLoadClass::LoadKind::kJitTableAddress:
      DCHECK(GetCompilerOptions().IsJitCompiler());
      break;
    case HLoadClass::LoadKind::kRuntimeCall:
      break;
  }
  return desired_class_load_kind;
}
6195
// Sets up the register constraints for an HLoadClass instruction.
// The code-generation counterpart is InstructionCodeGeneratorX86_64::VisitLoadClass.
void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    // Custom calling convention: RAX serves as both input and output.
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
        cls,
        Location::RegisterLocation(RAX),
        Location::RegisterLocation(RAX));
    return;
  }
  // Access checks are implemented only by the public/package .bss entry kinds.
  DCHECK_EQ(cls->NeedsAccessCheck(),
            load_kind == HLoadClass::LoadKind::kBssEntryPublic ||
                load_kind == HLoadClass::LoadKind::kBssEntryPackage);

  // A read barrier (unless the class lives in the boot image) or an
  // environment forces a slow-path-capable location summary.
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    // The Baker read barrier slow path preserves registers itself.
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }

  if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
    // Input 0 is the current ArtMethod* whose declaring class is loaded.
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // Rely on the type resolution and/or initialization to save everything.
      locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
    } else {
      // For non-Baker read barrier we have a temp-clobbering call.
    }
  }
}
6232
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006233Label* CodeGeneratorX86_64::NewJitRootClassPatch(const DexFile& dex_file,
Vladimir Marko174b2e22017-10-12 13:34:49 +01006234 dex::TypeIndex type_index,
Nicolas Geoffray5247c082017-01-13 14:17:29 +00006235 Handle<mirror::Class> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01006236 ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006237 // Add a patch entry and return the label.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006238 jit_class_patches_.emplace_back(&dex_file, type_index.index_);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006239 PatchInfo<Label>* info = &jit_class_patches_.back();
6240 return &info->label;
6241}
6242
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
//
// Emits the code for an HLoadClass, dispatching on the load kind chosen by
// GetSupportedLoadClassKind. Some kinds finish with a slow path for
// resolution (null .bss entry) and/or class initialization.
void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    // Fully delegated to the runtime; nothing else to emit here.
    codegen_->GenerateLoadClassRuntimeCall(cls);
    return;
  }
  // Only the public/package .bss entry kinds perform access checks.
  DCHECK_EQ(cls->NeedsAccessCheck(),
            load_kind == HLoadClass::LoadKind::kBssEntryPublic ||
                load_kind == HLoadClass::LoadKind::kBssEntryPackage);

  LocationSummary* locations = cls->GetLocations();
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  // Boot image classes are never moved by the GC, so no read barrier is needed.
  const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
      ? kWithoutReadBarrier
      : kCompilerReadBarrierOption;
  bool generate_null_check = false;
  switch (load_kind) {
    case HLoadClass::LoadKind::kReferrersClass: {
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
      GenerateGcRootFieldLoad(
          cls,
          out_loc,
          Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()),
          /* fixup_label= */ nullptr,
          read_barrier_option);
      break;
    }
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
      // PC-relative LEA whose 32-bit displacement is patched at link time.
      DCHECK(codegen_->GetCompilerOptions().IsBootImage() ||
             codegen_->GetCompilerOptions().IsBootImageExtension());
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      __ leal(out,
              Address::Absolute(CodeGeneratorX86_64::kPlaceholder32BitOffset, /* no_rip= */ false));
      codegen_->RecordBootImageTypePatch(cls);
      break;
    case HLoadClass::LoadKind::kBootImageRelRo: {
      // Load the class pointer from the .data.bimg.rel.ro section.
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      __ movl(out,
              Address::Absolute(CodeGeneratorX86_64::kPlaceholder32BitOffset, /* no_rip= */ false));
      codegen_->RecordBootImageRelRoPatch(CodeGenerator::GetBootImageOffset(cls));
      break;
    }
    case HLoadClass::LoadKind::kBssEntry:
    case HLoadClass::LoadKind::kBssEntryPublic:
    case HLoadClass::LoadKind::kBssEntryPackage: {
      // Load from the type's .bss slot; null means not yet resolved, which is
      // handled by the slow path emitted below (generate_null_check).
      Address address = Address::Absolute(CodeGeneratorX86_64::kPlaceholder32BitOffset,
                                          /* no_rip= */ false);
      Label* fixup_label = codegen_->NewTypeBssEntryPatch(cls);
      // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
      // No need for memory fence, thanks to the x86-64 memory model.
      generate_null_check = true;
      break;
    }
    case HLoadClass::LoadKind::kJitBootImageAddress: {
      // The class address is a compile-time constant under JIT.
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      uint32_t address = reinterpret_cast32<uint32_t>(cls->GetClass().Get());
      DCHECK_NE(address, 0u);
      __ movl(out, Immediate(static_cast<int32_t>(address)));  // Zero-extended.
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress: {
      // Load the root from the JIT root table (RIP-relative placeholder).
      Address address = Address::Absolute(CodeGeneratorX86_64::kPlaceholder32BitOffset,
                                          /* no_rip= */ true);
      Label* fixup_label =
          codegen_->NewJitRootClassPatch(cls->GetDexFile(), cls->GetTypeIndex(), cls->GetClass());
      // /* GcRoot<mirror::Class> */ out = *address
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected load kind: " << cls->GetLoadKind();
      UNREACHABLE();
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCode* slow_path =
        new (codegen_->GetScopedAllocator()) LoadClassSlowPathX86_64(cls, cls);
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      // Unresolved .bss entry: jump to the slow path to resolve the class.
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      // The initialization check binds the slow path's exit label itself.
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
6341
// Sets up locations for an explicit class-initialization check: the class
// register is input 0, and the (optional) output aliases that input.
void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
  // Rely on the type initialization to save everything we need.
  locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
}
6352
Orion Hodsondbaa5c72018-05-10 08:22:46 +01006353void LocationsBuilderX86_64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
6354 // Custom calling convention: RAX serves as both input and output.
6355 Location location = Location::RegisterLocation(RAX);
6356 CodeGenerator::CreateLoadMethodHandleRuntimeCallLocationSummary(load, location, location);
6357}
6358
// Emits the runtime call that resolves the MethodHandle; no fast path exists
// for this instruction (see the matching LocationsBuilder method).
void InstructionCodeGeneratorX86_64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
  codegen_->GenerateLoadMethodHandleRuntimeCall(load);
}
6362
Orion Hodson18259d72018-04-12 11:18:23 +01006363void LocationsBuilderX86_64::VisitLoadMethodType(HLoadMethodType* load) {
6364 // Custom calling convention: RAX serves as both input and output.
6365 Location location = Location::RegisterLocation(RAX);
6366 CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
6367}
6368
// Emits the runtime call that resolves the MethodType; no fast path exists
// for this instruction (see the matching LocationsBuilder method).
void InstructionCodeGeneratorX86_64::VisitLoadMethodType(HLoadMethodType* load) {
  codegen_->GenerateLoadMethodTypeRuntimeCall(load);
}
6372
// Emits the class-initialization check: fast path tests the class status
// inline (in GenerateClassInitializationCheck); the slow path calls into the
// runtime to initialize the class.
void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class to not be null.
  SlowPathCode* slow_path =
      new (codegen_->GetScopedAllocator()) LoadClassSlowPathX86_64(check->GetLoadClass(), check);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path,
                                   check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
}
6381
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006382HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
6383 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006384 switch (desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006385 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006386 case HLoadString::LoadKind::kBootImageRelRo:
Vladimir Markoaad75c62016-10-03 08:46:48 +00006387 case HLoadString::LoadKind::kBssEntry:
Vladimir Marko695348f2020-05-19 14:42:02 +01006388 DCHECK(!GetCompilerOptions().IsJitCompiler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006389 break;
Vladimir Marko8e524ad2018-07-13 10:27:43 +01006390 case HLoadString::LoadKind::kJitBootImageAddress:
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006391 case HLoadString::LoadKind::kJitTableAddress:
Vladimir Marko695348f2020-05-19 14:42:02 +01006392 DCHECK(GetCompilerOptions().IsJitCompiler());
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006393 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006394 case HLoadString::LoadKind::kRuntimeCall:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006395 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006396 }
6397 return desired_string_load_kind;
6398}
6399
// Sets up the register constraints for an HLoadString instruction.
// The code-generation counterpart is InstructionCodeGeneratorX86_64::VisitLoadString.
void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
  if (load->GetLoadKind() == HLoadString::LoadKind::kRuntimeCall) {
    // Custom calling convention: the result is returned in RAX.
    locations->SetOut(Location::RegisterLocation(RAX));
  } else {
    locations->SetOut(Location::RequiresRegister());
    if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
      if (!kUseReadBarrier || kUseBakerReadBarrier) {
        // Rely on the pResolveString to save everything.
        locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
      } else {
        // For non-Baker read barrier we have a temp-clobbering call.
      }
    }
  }
}
6417
Andreas Gampe8a0128a2016-11-28 07:38:35 -08006418Label* CodeGeneratorX86_64::NewJitRootStringPatch(const DexFile& dex_file,
Vladimir Marko174b2e22017-10-12 13:34:49 +01006419 dex::StringIndex string_index,
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006420 Handle<mirror::String> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01006421 ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006422 // Add a patch entry and return the label.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006423 jit_string_patches_.emplace_back(&dex_file, string_index.index_);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006424 PatchInfo<Label>* info = &jit_string_patches_.back();
6425 return &info->label;
6426}
6427
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
//
// Emits the code for an HLoadString, dispatching on the load kind. All
// handled kinds return early; falling out of the switch means the string is
// resolved via the pResolveString runtime entrypoint.
void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
  LocationSummary* locations = load->GetLocations();
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  switch (load->GetLoadKind()) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      // PC-relative LEA whose 32-bit displacement is patched at link time.
      DCHECK(codegen_->GetCompilerOptions().IsBootImage() ||
             codegen_->GetCompilerOptions().IsBootImageExtension());
      __ leal(out,
              Address::Absolute(CodeGeneratorX86_64::kPlaceholder32BitOffset, /* no_rip= */ false));
      codegen_->RecordBootImageStringPatch(load);
      return;
    }
    case HLoadString::LoadKind::kBootImageRelRo: {
      // Load the string pointer from the .data.bimg.rel.ro section.
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      __ movl(out,
              Address::Absolute(CodeGeneratorX86_64::kPlaceholder32BitOffset, /* no_rip= */ false));
      codegen_->RecordBootImageRelRoPatch(CodeGenerator::GetBootImageOffset(load));
      return;
    }
    case HLoadString::LoadKind::kBssEntry: {
      // Load from the string's .bss slot; null means not yet resolved and
      // takes the slow path below.
      Address address = Address::Absolute(CodeGeneratorX86_64::kPlaceholder32BitOffset,
                                          /* no_rip= */ false);
      Label* fixup_label = codegen_->NewStringBssEntryPatch(load);
      // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
      // No need for memory fence, thanks to the x86-64 memory model.
      SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) LoadStringSlowPathX86_64(load);
      codegen_->AddSlowPath(slow_path);
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
    case HLoadString::LoadKind::kJitBootImageAddress: {
      // The string address is a compile-time constant under JIT.
      uint32_t address = reinterpret_cast32<uint32_t>(load->GetString().Get());
      DCHECK_NE(address, 0u);
      __ movl(out, Immediate(static_cast<int32_t>(address)));  // Zero-extended.
      return;
    }
    case HLoadString::LoadKind::kJitTableAddress: {
      // Load the root from the JIT root table (RIP-relative placeholder).
      Address address = Address::Absolute(CodeGeneratorX86_64::kPlaceholder32BitOffset,
                                          /* no_rip= */ true);
      Label* fixup_label = codegen_->NewJitRootStringPatch(
          load->GetDexFile(), load->GetStringIndex(), load->GetString());
      // /* GcRoot<mirror::String> */ out = *address
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
      return;
    }
    default:
      break;
  }

  // TODO: Re-add the compiler code to do string dex cache lookup again.
  // Custom calling convention: RAX serves as both input and output.
  __ movl(CpuRegister(RAX), Immediate(load->GetStringIndex().index_));
  codegen_->InvokeRuntime(kQuickResolveString,
                          load,
                          load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}
6492
// Returns the address of the current thread's pending-exception field,
// addressed GS-relative (hence no_rip) on x86-64.
static Address GetExceptionTlsAddress() {
  return Address::Absolute(Thread::ExceptionOffset<kX86_64PointerSize>().Int32Value(),
                           /* no_rip= */ true);
}
6497
// HLoadException only needs an output register for the exception reference.
void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}
6503
// Loads the pending exception from the thread-local slot (GS-relative).
void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
  __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
}
6507
// HClearException has no inputs or outputs; it only needs a LocationSummary.
void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
  new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
}
6511
// Stores null into the thread-local pending-exception slot (GS-relative).
void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
}
6515
// HThrow always calls the runtime; the exception object goes in the first
// runtime-call argument register.
void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
6522
// Throwing is fully delegated to the pDeliverException runtime entrypoint.
void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
6527
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006528// Temp is used for read barrier.
6529static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
6530 if (kEmitCompilerReadBarrier &&
Vladimir Marko953437b2016-08-24 08:30:46 +00006531 !kUseBakerReadBarrier &&
6532 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006533 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006534 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
6535 return 1;
6536 }
6537 return 0;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006538}
6539
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006540// Interface case has 2 temps, one for holding the number of interfaces, one for the current
6541// interface pointer, the current interface is compared in memory.
6542// The other checks have one temp for loading the object's class.
6543static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
6544 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
6545 return 2;
6546 }
6547 return 1 + NumberOfInstanceOfTemps(type_check_kind);
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00006548}
6549
// Sets up the register constraints for an HInstanceOf instruction. The call
// kind, caller-save set, and temp count all depend on the type-check kind and
// the read barrier configuration.
void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  bool baker_read_barrier_slow_path = false;
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck: {
      // These kinds need a slow path only when a read barrier is required.
      bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
      call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
      baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
      break;
    }
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      // These kinds always fall back to a slow path.
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
    case TypeCheckKind::kBitstringCheck:
      // Compared inline against constants; no slow path, no read barrier.
      break;
  }

  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
  if (baker_read_barrier_slow_path) {
    // The Baker read barrier slow path preserves registers itself.
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  if (type_check_kind == TypeCheckKind::kBitstringCheck) {
    // Inputs 1-3 are the bitstring path-to-root, mask, and mask_bits constants.
    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
  } else {
    locations->SetInAt(1, Location::Any());
  }
  // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
  locations->SetOut(Location::RequiresRegister());
  locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
}
6590
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006591void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006592 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006593 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006594 Location obj_loc = locations->InAt(0);
6595 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006596 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006597 Location out_loc = locations->Out();
6598 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006599 const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
6600 DCHECK_LE(num_temps, 1u);
6601 Location maybe_temp_loc = (num_temps >= 1u) ? locations->GetTemp(0) : Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006602 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006603 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
6604 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
6605 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07006606 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006607 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006608
6609 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006610 // Avoid null check if we know obj is not null.
6611 if (instruction->MustDoNullCheck()) {
6612 __ testl(obj, obj);
6613 __ j(kEqual, &zero);
6614 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006615
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006616 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006617 case TypeCheckKind::kExactCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00006618 ReadBarrierOption read_barrier_option =
6619 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006620 // /* HeapReference<Class> */ out = obj->klass_
6621 GenerateReferenceLoadTwoRegisters(instruction,
6622 out_loc,
6623 obj_loc,
6624 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00006625 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006626 if (cls.IsRegister()) {
6627 __ cmpl(out, cls.AsRegister<CpuRegister>());
6628 } else {
6629 DCHECK(cls.IsStackSlot()) << cls;
6630 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
6631 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006632 if (zero.IsLinked()) {
6633 // Classes must be equal for the instanceof to succeed.
6634 __ j(kNotEqual, &zero);
6635 __ movl(out, Immediate(1));
6636 __ jmp(&done);
6637 } else {
6638 __ setcc(kEqual, out);
6639 // setcc only sets the low byte.
6640 __ andl(out, Immediate(1));
6641 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006642 break;
6643 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006644
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006645 case TypeCheckKind::kAbstractClassCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00006646 ReadBarrierOption read_barrier_option =
6647 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006648 // /* HeapReference<Class> */ out = obj->klass_
6649 GenerateReferenceLoadTwoRegisters(instruction,
6650 out_loc,
6651 obj_loc,
6652 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00006653 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006654 // If the class is abstract, we eagerly fetch the super class of the
6655 // object to avoid doing a comparison we know will fail.
6656 NearLabel loop, success;
6657 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006658 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006659 GenerateReferenceLoadOneRegister(instruction,
6660 out_loc,
6661 super_offset,
6662 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00006663 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006664 __ testl(out, out);
6665 // If `out` is null, we use it for the result, and jump to `done`.
6666 __ j(kEqual, &done);
6667 if (cls.IsRegister()) {
6668 __ cmpl(out, cls.AsRegister<CpuRegister>());
6669 } else {
6670 DCHECK(cls.IsStackSlot()) << cls;
6671 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
6672 }
6673 __ j(kNotEqual, &loop);
6674 __ movl(out, Immediate(1));
6675 if (zero.IsLinked()) {
6676 __ jmp(&done);
6677 }
6678 break;
6679 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006680
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006681 case TypeCheckKind::kClassHierarchyCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00006682 ReadBarrierOption read_barrier_option =
6683 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006684 // /* HeapReference<Class> */ out = obj->klass_
6685 GenerateReferenceLoadTwoRegisters(instruction,
6686 out_loc,
6687 obj_loc,
6688 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00006689 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006690 // Walk over the class hierarchy to find a match.
6691 NearLabel loop, success;
6692 __ Bind(&loop);
6693 if (cls.IsRegister()) {
6694 __ cmpl(out, cls.AsRegister<CpuRegister>());
6695 } else {
6696 DCHECK(cls.IsStackSlot()) << cls;
6697 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
6698 }
6699 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006700 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006701 GenerateReferenceLoadOneRegister(instruction,
6702 out_loc,
6703 super_offset,
6704 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00006705 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006706 __ testl(out, out);
6707 __ j(kNotEqual, &loop);
6708 // If `out` is null, we use it for the result, and jump to `done`.
6709 __ jmp(&done);
6710 __ Bind(&success);
6711 __ movl(out, Immediate(1));
6712 if (zero.IsLinked()) {
6713 __ jmp(&done);
6714 }
6715 break;
6716 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006717
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006718 case TypeCheckKind::kArrayObjectCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00006719 ReadBarrierOption read_barrier_option =
6720 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006721 // /* HeapReference<Class> */ out = obj->klass_
6722 GenerateReferenceLoadTwoRegisters(instruction,
6723 out_loc,
6724 obj_loc,
6725 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00006726 read_barrier_option);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006727 // Do an exact check.
6728 NearLabel exact_check;
6729 if (cls.IsRegister()) {
6730 __ cmpl(out, cls.AsRegister<CpuRegister>());
6731 } else {
6732 DCHECK(cls.IsStackSlot()) << cls;
6733 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
6734 }
6735 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006736 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006737 // /* HeapReference<Class> */ out = out->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006738 GenerateReferenceLoadOneRegister(instruction,
6739 out_loc,
6740 component_offset,
6741 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00006742 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006743 __ testl(out, out);
6744 // If `out` is null, we use it for the result, and jump to `done`.
6745 __ j(kEqual, &done);
6746 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
6747 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006748 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006749 __ movl(out, Immediate(1));
6750 __ jmp(&done);
6751 break;
6752 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006753
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006754 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006755 // No read barrier since the slow path will retry upon failure.
6756 // /* HeapReference<Class> */ out = obj->klass_
6757 GenerateReferenceLoadTwoRegisters(instruction,
6758 out_loc,
6759 obj_loc,
6760 class_offset,
6761 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006762 if (cls.IsRegister()) {
6763 __ cmpl(out, cls.AsRegister<CpuRegister>());
6764 } else {
6765 DCHECK(cls.IsStackSlot()) << cls;
6766 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
6767 }
6768 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01006769 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86_64(
Andreas Gampe3db70682018-12-26 15:12:03 -08006770 instruction, /* is_fatal= */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006771 codegen_->AddSlowPath(slow_path);
6772 __ j(kNotEqual, slow_path->GetEntryLabel());
6773 __ movl(out, Immediate(1));
6774 if (zero.IsLinked()) {
6775 __ jmp(&done);
6776 }
6777 break;
6778 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006779
Calin Juravle98893e12015-10-02 21:05:03 +01006780 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006781 case TypeCheckKind::kInterfaceCheck: {
6782 // Note that we indeed only call on slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006783 // into the slow path for the unresolved and interface check
Roland Levillain0d5a2812015-11-13 10:07:31 +00006784 // cases.
6785 //
6786 // We cannot directly call the InstanceofNonTrivial runtime
6787 // entry point without resorting to a type checking slow path
6788 // here (i.e. by calling InvokeRuntime directly), as it would
6789 // require to assign fixed registers for the inputs of this
6790 // HInstanceOf instruction (following the runtime calling
6791 // convention), which might be cluttered by the potential first
6792 // read barrier emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006793 //
6794 // TODO: Introduce a new runtime entry point taking the object
6795 // to test (instead of its class) as argument, and let it deal
6796 // with the read barrier issues. This will let us refactor this
6797 // case of the `switch` code as it was previously (with a direct
6798 // call to the runtime not using a type checking slow path).
6799 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006800 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01006801 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86_64(
Andreas Gampe3db70682018-12-26 15:12:03 -08006802 instruction, /* is_fatal= */ false);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006803 codegen_->AddSlowPath(slow_path);
6804 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006805 if (zero.IsLinked()) {
6806 __ jmp(&done);
6807 }
6808 break;
6809 }
Vladimir Marko175e7862018-03-27 09:03:13 +00006810
6811 case TypeCheckKind::kBitstringCheck: {
6812 // /* HeapReference<Class> */ temp = obj->klass_
6813 GenerateReferenceLoadTwoRegisters(instruction,
6814 out_loc,
6815 obj_loc,
6816 class_offset,
6817 kWithoutReadBarrier);
6818
6819 GenerateBitstringTypeCheckCompare(instruction, out);
6820 if (zero.IsLinked()) {
6821 __ j(kNotEqual, &zero);
6822 __ movl(out, Immediate(1));
6823 __ jmp(&done);
6824 } else {
6825 __ setcc(kEqual, out);
6826 // setcc only sets the low byte.
6827 __ andl(out, Immediate(1));
6828 }
6829 break;
6830 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006831 }
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006832
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006833 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006834 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006835 __ xorl(out, out);
6836 }
6837
6838 if (done.IsLinked()) {
6839 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006840 }
6841
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006842 if (slow_path != nullptr) {
6843 __ Bind(slow_path->GetExitLabel());
6844 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006845}
6846
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006847void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006848 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko87584542017-12-12 17:47:52 +00006849 LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
Vladimir Markoca6fff82017-10-03 14:49:14 +01006850 LocationSummary* locations =
6851 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006852 locations->SetInAt(0, Location::RequiresRegister());
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006853 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
6854 // Require a register for the interface check since there is a loop that compares the class to
6855 // a memory address.
6856 locations->SetInAt(1, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00006857 } else if (type_check_kind == TypeCheckKind::kBitstringCheck) {
6858 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
6859 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
6860 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006861 } else {
6862 locations->SetInAt(1, Location::Any());
6863 }
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006864 // Add temps for read barriers and other uses. One is used by TypeCheckSlowPathX86.
6865 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006866}
6867
// Generates code for HCheckCast: verifies at run time that the object in input 0
// can be cast to the class in input 1. Whenever the inline fast path cannot prove
// the cast valid, control branches to `type_check_slow_path`, which performs the
// full check (and raises the failure) out of line.
void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location cls = locations->InAt(1);
  Location temp_loc = locations->GetTemp(0);
  CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
  // A second temp exists only for the check kinds that need it (see
  // NumberOfCheckCastTemps); otherwise it is NoLocation().
  const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
  DCHECK_GE(num_temps, 1u);
  DCHECK_LE(num_temps, 2u);
  Location maybe_temp2_loc = (num_temps >= 2u) ? locations->GetTemp(1) : Location::NoLocation();
  // Field offsets inside mirror::Object / mirror::Class / mirror::Array used below.
  const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
  const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
  const uint32_t object_array_data_offset =
      mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();

  // The slow path is "fatal" when the fast path can fully decide the check, i.e.
  // entering the slow path necessarily means the cast fails.
  bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
  SlowPathCode* type_check_slow_path =
      new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86_64(
          instruction, is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(type_check_slow_path);


  NearLabel done;
  // Avoid null check if we know obj is not null. A null reference trivially
  // passes any checked cast, hence the direct jump to `done`.
  if (instruction->MustDoNullCheck()) {
    __ testl(obj, obj);
    __ j(kEqual, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      NearLabel loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception.
      __ testl(temp, temp);
      // Otherwise, compare the classes.
      __ j(kZero, type_check_slow_path->GetEntryLabel());
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      // Not equal: keep climbing the superclass chain. Equal: fall through (success).
      __ j(kNotEqual, &loop);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      // Walk over the class hierarchy to find a match.
      NearLabel loop;
      __ Bind(&loop);
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &done);

      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the class reference currently in `temp` is not null, jump
      // back at the beginning of the loop.
      __ testl(temp, temp);
      __ j(kNotZero, &loop);
      // Otherwise, jump to the slow path to throw the exception.
      __ jmp(type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      // Do an exact check.
      NearLabel check_non_primitive_component_type;
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &done);

      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       component_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the component type is not null (i.e. the object is indeed
      // an array), jump to label `check_non_primitive_component_type`
      // to further check that this component type is not a primitive
      // type.
      __ testl(temp, temp);
      // Otherwise, jump to the slow path to throw the exception.
      __ j(kZero, type_check_slow_path->GetEntryLabel());
      // Primitive component type means a primitive array, which cannot match.
      __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
      __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kUnresolvedCheck: {
      // We always go into the type check slow path for the unresolved case.
      //
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require to
      // assign fixed registers for the inputs of this HCheckCast
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.
      __ jmp(type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kInterfaceCheck: {
      // Fast path for the interface check. Try to avoid read barriers to improve the fast path.
      // We can not get false positives by doing this.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);

      // /* HeapReference<Class> */ temp = temp->iftable_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        temp_loc,
                                        iftable_offset,
                                        kWithoutReadBarrier);
      // Iftable is never null.
      __ movl(maybe_temp2_loc.AsRegister<CpuRegister>(), Address(temp, array_length_offset));
      // Maybe poison the `cls` for direct comparison with memory.
      __ MaybePoisonHeapReference(cls.AsRegister<CpuRegister>());
      // Loop through the iftable and check if any class matches.
      NearLabel start_loop;
      __ Bind(&start_loop);
      // Need to subtract first to handle the empty array case. The index steps
      // by 2 because each iftable entry spans two array slots (presumably the
      // interface class plus its method array — see the iftable layout).
      __ subl(maybe_temp2_loc.AsRegister<CpuRegister>(), Immediate(2));
      __ j(kNegative, type_check_slow_path->GetEntryLabel());
      // Go to next interface if the classes do not match.
      __ cmpl(cls.AsRegister<CpuRegister>(),
              CodeGeneratorX86_64::ArrayAddress(temp,
                                                maybe_temp2_loc,
                                                TIMES_4,
                                                object_array_data_offset));
      __ j(kNotEqual, &start_loop);  // Fall through (success) on a match.
      // If `cls` was poisoned above, unpoison it.
      __ MaybeUnpoisonHeapReference(cls.AsRegister<CpuRegister>());
      break;
    }

    case TypeCheckKind::kBitstringCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);

      // Sets the flags for the subsequent conditional jump.
      GenerateBitstringTypeCheckCompare(instruction, temp);
      __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
      break;
    }
  }

  // `done` is only bound if some path above actually jumps to it.
  if (done.IsLinked()) {
    __ Bind(&done);
  }

  __ Bind(type_check_slow_path->GetExitLabel());
}
7100
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00007101void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007102 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
7103 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00007104 InvokeRuntimeCallingConvention calling_convention;
7105 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
7106}
7107
7108void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01007109 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexandre Rames8158f282015-08-07 10:26:17 +01007110 instruction,
Serban Constantinescuba45db02016-07-12 22:53:02 +01007111 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00007112 if (instruction->IsEnter()) {
7113 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
7114 } else {
7115 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
7116 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00007117}
7118
Shalini Salomi Bodapatidd121f62018-10-26 15:03:53 +05307119void LocationsBuilderX86_64::VisitX86AndNot(HX86AndNot* instruction) {
7120 DCHECK(codegen_->GetInstructionSetFeatures().HasAVX2());
7121 DCHECK(DataType::IsIntOrLongType(instruction->GetType())) << instruction->GetType();
7122 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
7123 locations->SetInAt(0, Location::RequiresRegister());
7124 // There is no immediate variant of negated bitwise and in X86.
7125 locations->SetInAt(1, Location::RequiresRegister());
7126 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
7127}
7128
7129void LocationsBuilderX86_64::VisitX86MaskOrResetLeastSetBit(HX86MaskOrResetLeastSetBit* instruction) {
7130 DCHECK(codegen_->GetInstructionSetFeatures().HasAVX2());
7131 DCHECK(DataType::IsIntOrLongType(instruction->GetType())) << instruction->GetType();
7132 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
7133 locations->SetInAt(0, Location::RequiresRegister());
7134 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
7135}
7136
7137void InstructionCodeGeneratorX86_64::VisitX86AndNot(HX86AndNot* instruction) {
7138 LocationSummary* locations = instruction->GetLocations();
7139 Location first = locations->InAt(0);
7140 Location second = locations->InAt(1);
7141 Location dest = locations->Out();
7142 __ andn(dest.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
7143}
7144
7145void InstructionCodeGeneratorX86_64::VisitX86MaskOrResetLeastSetBit(HX86MaskOrResetLeastSetBit* instruction) {
7146 LocationSummary* locations = instruction->GetLocations();
7147 Location src = locations->InAt(0);
7148 Location dest = locations->Out();
7149 switch (instruction->GetOpKind()) {
7150 case HInstruction::kAnd:
7151 __ blsr(dest.AsRegister<CpuRegister>(), src.AsRegister<CpuRegister>());
7152 break;
7153 case HInstruction::kXor:
7154 __ blsmsk(dest.AsRegister<CpuRegister>(), src.AsRegister<CpuRegister>());
7155 break;
7156 default:
7157 LOG(FATAL) << "Unreachable";
7158 }
7159}
7160
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007161void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
7162void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
7163void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
7164
7165void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
7166 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007167 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007168 DCHECK(instruction->GetResultType() == DataType::Type::kInt32
7169 || instruction->GetResultType() == DataType::Type::kInt64);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007170 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04007171 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007172 locations->SetOut(Location::SameAsFirstInput());
7173}
7174
7175void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
7176 HandleBitwiseOperation(instruction);
7177}
7178
7179void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
7180 HandleBitwiseOperation(instruction);
7181}
7182
7183void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
7184 HandleBitwiseOperation(instruction);
7185}
7186
// Emits code for And/Or/Xor on int32/int64 operands. The locations builder
// placed the result in the same register as the first input, so each case
// emits a single two-operand read-modify-write ALU instruction.
void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));

  if (instruction->GetResultType() == DataType::Type::kInt32) {
    // 32-bit: the second operand may be a register, an immediate, or a stack slot.
    if (second.IsRegister()) {
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      }
    } else if (second.IsConstant()) {
      Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), imm);
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), imm);
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), imm);
      }
    } else {
      // Stack-slot operand: fold the load into the ALU instruction.
      Address address(CpuRegister(RSP), second.GetStackIndex());
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), address);
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), address);
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), address);
      }
    }
  } else {
    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
    CpuRegister first_reg = first.AsRegister<CpuRegister>();
    bool second_is_constant = false;
    int64_t value = 0;
    if (second.IsConstant()) {
      second_is_constant = true;
      value = second.GetConstant()->AsLongConstant()->GetValue();
    }
    // Only meaningful when `second` is a constant: 64-bit ALU immediates are
    // limited to sign-extended int32, so wider constants are loaded from an
    // in-memory literal (LiteralInt64Address) instead.
    bool is_int32_value = IsInt<32>(value);

    if (instruction->IsAnd()) {
      if (second_is_constant) {
        if (is_int32_value) {
          __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ andq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ andq(first_reg, second.AsRegister<CpuRegister>());
      }
    } else if (instruction->IsOr()) {
      if (second_is_constant) {
        if (is_int32_value) {
          __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ orq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ orq(first_reg, second.AsRegister<CpuRegister>());
      }
    } else {
      DCHECK(instruction->IsXor());
      if (second_is_constant) {
        if (is_int32_value) {
          __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ xorq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ xorq(first_reg, second.AsRegister<CpuRegister>());
      }
    }
  }
}
7275
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007276void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(
7277 HInstruction* instruction,
7278 Location out,
7279 uint32_t offset,
7280 Location maybe_temp,
7281 ReadBarrierOption read_barrier_option) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007282 CpuRegister out_reg = out.AsRegister<CpuRegister>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007283 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007284 CHECK(kEmitCompilerReadBarrier);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007285 if (kUseBakerReadBarrier) {
7286 // Load with fast path based Baker's read barrier.
7287 // /* HeapReference<Object> */ out = *(out + offset)
7288 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08007289 instruction, out, out_reg, offset, /* needs_null_check= */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007290 } else {
7291 // Load with slow path based read barrier.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00007292 // Save the value of `out` into `maybe_temp` before overwriting it
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007293 // in the following move operation, as we will need it for the
7294 // read barrier below.
Vladimir Marko953437b2016-08-24 08:30:46 +00007295 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain95e7ffc2016-01-22 11:57:25 +00007296 __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007297 // /* HeapReference<Object> */ out = *(out + offset)
7298 __ movl(out_reg, Address(out_reg, offset));
Roland Levillain95e7ffc2016-01-22 11:57:25 +00007299 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007300 }
7301 } else {
7302 // Plain load with no read barrier.
7303 // /* HeapReference<Object> */ out = *(out + offset)
7304 __ movl(out_reg, Address(out_reg, offset));
7305 __ MaybeUnpoisonHeapReference(out_reg);
7306 }
7307}
7308
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007309void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(
7310 HInstruction* instruction,
7311 Location out,
7312 Location obj,
7313 uint32_t offset,
7314 ReadBarrierOption read_barrier_option) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007315 CpuRegister out_reg = out.AsRegister<CpuRegister>();
7316 CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007317 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007318 CHECK(kEmitCompilerReadBarrier);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007319 if (kUseBakerReadBarrier) {
7320 // Load with fast path based Baker's read barrier.
7321 // /* HeapReference<Object> */ out = *(obj + offset)
7322 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08007323 instruction, out, obj_reg, offset, /* needs_null_check= */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007324 } else {
7325 // Load with slow path based read barrier.
7326 // /* HeapReference<Object> */ out = *(obj + offset)
7327 __ movl(out_reg, Address(obj_reg, offset));
7328 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
7329 }
7330 } else {
7331 // Plain load with no read barrier.
7332 // /* HeapReference<Object> */ out = *(obj + offset)
7333 __ movl(out_reg, Address(obj_reg, offset));
7334 __ MaybeUnpoisonHeapReference(out_reg);
7335 }
7336}
7337
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007338void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(
7339 HInstruction* instruction,
7340 Location root,
7341 const Address& address,
7342 Label* fixup_label,
7343 ReadBarrierOption read_barrier_option) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007344 CpuRegister root_reg = root.AsRegister<CpuRegister>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007345 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartier31b12e32016-09-02 17:11:57 -07007346 DCHECK(kEmitCompilerReadBarrier);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007347 if (kUseBakerReadBarrier) {
7348 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
7349 // Baker's read barrier are used:
7350 //
Roland Levillaind966ce72017-02-09 16:20:14 +00007351 // root = obj.field;
7352 // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
7353 // if (temp != null) {
7354 // root = temp(root)
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007355 // }
7356
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007357 // /* GcRoot<mirror::Object> */ root = *address
7358 __ movl(root_reg, address);
7359 if (fixup_label != nullptr) {
7360 __ Bind(fixup_label);
7361 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007362 static_assert(
7363 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
7364 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
7365 "have different sizes.");
7366 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
7367 "art::mirror::CompressedReference<mirror::Object> and int32_t "
7368 "have different sizes.");
7369
Vladimir Marko953437b2016-08-24 08:30:46 +00007370 // Slow path marking the GC root `root`.
Vladimir Marko174b2e22017-10-12 13:34:49 +01007371 SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) ReadBarrierMarkSlowPathX86_64(
Andreas Gampe3db70682018-12-26 15:12:03 -08007372 instruction, root, /* unpoison_ref_before_marking= */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007373 codegen_->AddSlowPath(slow_path);
7374
Roland Levillaind966ce72017-02-09 16:20:14 +00007375 // Test the `Thread::Current()->pReadBarrierMarkReg ## root.reg()` entrypoint.
7376 const int32_t entry_point_offset =
Roland Levillain97c46462017-05-11 14:04:03 +01007377 Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(root.reg());
Andreas Gampe3db70682018-12-26 15:12:03 -08007378 __ gs()->cmpl(Address::Absolute(entry_point_offset, /* no_rip= */ true), Immediate(0));
Roland Levillaind966ce72017-02-09 16:20:14 +00007379 // The entrypoint is null when the GC is not marking.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007380 __ j(kNotEqual, slow_path->GetEntryLabel());
7381 __ Bind(slow_path->GetExitLabel());
7382 } else {
7383 // GC root loaded through a slow path for read barriers other
7384 // than Baker's.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007385 // /* GcRoot<mirror::Object>* */ root = address
7386 __ leaq(root_reg, address);
7387 if (fixup_label != nullptr) {
7388 __ Bind(fixup_label);
7389 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007390 // /* mirror::Object* */ root = root->Read()
7391 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
7392 }
7393 } else {
7394 // Plain GC root load with no read barrier.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007395 // /* GcRoot<mirror::Object> */ root = *address
7396 __ movl(root_reg, address);
7397 if (fixup_label != nullptr) {
7398 __ Bind(fixup_label);
7399 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00007400 // Note that GC roots are not affected by heap poisoning, thus we
7401 // do not have to unpoison `root_reg` here.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007402 }
7403}
7404
7405void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
7406 Location ref,
7407 CpuRegister obj,
7408 uint32_t offset,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007409 bool needs_null_check) {
7410 DCHECK(kEmitCompilerReadBarrier);
7411 DCHECK(kUseBakerReadBarrier);
7412
7413 // /* HeapReference<Object> */ ref = *(obj + offset)
7414 Address src(obj, offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00007415 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007416}
7417
7418void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
7419 Location ref,
7420 CpuRegister obj,
7421 uint32_t data_offset,
7422 Location index,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007423 bool needs_null_check) {
7424 DCHECK(kEmitCompilerReadBarrier);
7425 DCHECK(kUseBakerReadBarrier);
7426
Roland Levillain3d312422016-06-23 13:53:42 +01007427 static_assert(
7428 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
7429 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007430 // /* HeapReference<Object> */ ref =
7431 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007432 Address src = CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00007433 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007434}
7435
7436void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
7437 Location ref,
7438 CpuRegister obj,
7439 const Address& src,
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007440 bool needs_null_check,
7441 bool always_update_field,
7442 CpuRegister* temp1,
7443 CpuRegister* temp2) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007444 DCHECK(kEmitCompilerReadBarrier);
7445 DCHECK(kUseBakerReadBarrier);
7446
7447 // In slow path based read barriers, the read barrier call is
7448 // inserted after the original load. However, in fast path based
7449 // Baker's read barriers, we need to perform the load of
7450 // mirror::Object::monitor_ *before* the original reference load.
7451 // This load-load ordering is required by the read barrier.
7452 // The fast path/slow path (for Baker's algorithm) should look like:
7453 //
7454 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
7455 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
7456 // HeapReference<Object> ref = *src; // Original reference load.
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007457 // bool is_gray = (rb_state == ReadBarrier::GrayState());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007458 // if (is_gray) {
7459 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
7460 // }
7461 //
7462 // Note: the original implementation in ReadBarrier::Barrier is
7463 // slightly more complex as:
7464 // - it implements the load-load fence using a data dependency on
Roland Levillaine3f43ac2016-01-19 15:07:47 +00007465 // the high-bits of rb_state, which are expected to be all zeroes
7466 // (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
7467 // here, which is a no-op thanks to the x86-64 memory model);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007468 // - it performs additional checks that we do not do here for
7469 // performance reasons.
7470
7471 CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007472 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
7473
Vladimir Marko953437b2016-08-24 08:30:46 +00007474 // Given the numeric representation, it's enough to check the low bit of the rb_state.
Roland Levillain14e5a292018-06-28 12:00:56 +01007475 static_assert(ReadBarrier::NonGrayState() == 0, "Expecting non-gray to have value 0");
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007476 static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
Vladimir Marko953437b2016-08-24 08:30:46 +00007477 constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
7478 constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
7479 constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);
7480
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007481 // if (rb_state == ReadBarrier::GrayState())
Vladimir Marko953437b2016-08-24 08:30:46 +00007482 // ref = ReadBarrier::Mark(ref);
7483 // At this point, just do the "if" and make sure that flags are preserved until the branch.
7484 __ testb(Address(obj, monitor_offset + gray_byte_position), Immediate(test_value));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007485 if (needs_null_check) {
7486 MaybeRecordImplicitNullCheck(instruction);
7487 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007488
7489 // Load fence to prevent load-load reordering.
7490 // Note that this is a no-op, thanks to the x86-64 memory model.
7491 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
7492
7493 // The actual reference load.
7494 // /* HeapReference<Object> */ ref = *src
Vladimir Marko953437b2016-08-24 08:30:46 +00007495 __ movl(ref_reg, src); // Flags are unaffected.
7496
7497 // Note: Reference unpoisoning modifies the flags, so we need to delay it after the branch.
7498 // Slow path marking the object `ref` when it is gray.
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007499 SlowPathCode* slow_path;
7500 if (always_update_field) {
7501 DCHECK(temp1 != nullptr);
7502 DCHECK(temp2 != nullptr);
Vladimir Marko174b2e22017-10-12 13:34:49 +01007503 slow_path = new (GetScopedAllocator()) ReadBarrierMarkAndUpdateFieldSlowPathX86_64(
Andreas Gampe3db70682018-12-26 15:12:03 -08007504 instruction, ref, obj, src, /* unpoison_ref_before_marking= */ true, *temp1, *temp2);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007505 } else {
Vladimir Marko174b2e22017-10-12 13:34:49 +01007506 slow_path = new (GetScopedAllocator()) ReadBarrierMarkSlowPathX86_64(
Andreas Gampe3db70682018-12-26 15:12:03 -08007507 instruction, ref, /* unpoison_ref_before_marking= */ true);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007508 }
Vladimir Marko953437b2016-08-24 08:30:46 +00007509 AddSlowPath(slow_path);
7510
7511 // We have done the "if" of the gray bit check above, now branch based on the flags.
7512 __ j(kNotZero, slow_path->GetEntryLabel());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007513
7514 // Object* ref = ref_addr->AsMirrorPtr()
7515 __ MaybeUnpoisonHeapReference(ref_reg);
7516
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007517 __ Bind(slow_path->GetExitLabel());
7518}
7519
7520void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
7521 Location out,
7522 Location ref,
7523 Location obj,
7524 uint32_t offset,
7525 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007526 DCHECK(kEmitCompilerReadBarrier);
7527
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007528 // Insert a slow path based read barrier *after* the reference load.
7529 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00007530 // If heap poisoning is enabled, the unpoisoning of the loaded
7531 // reference will be carried out by the runtime within the slow
7532 // path.
7533 //
7534 // Note that `ref` currently does not get unpoisoned (when heap
7535 // poisoning is enabled), which is alright as the `ref` argument is
7536 // not used by the artReadBarrierSlow entry point.
7537 //
7538 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
Vladimir Marko174b2e22017-10-12 13:34:49 +01007539 SlowPathCode* slow_path = new (GetScopedAllocator())
Roland Levillain0d5a2812015-11-13 10:07:31 +00007540 ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
7541 AddSlowPath(slow_path);
7542
Roland Levillain0d5a2812015-11-13 10:07:31 +00007543 __ jmp(slow_path->GetEntryLabel());
7544 __ Bind(slow_path->GetExitLabel());
7545}
7546
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007547void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
7548 Location out,
7549 Location ref,
7550 Location obj,
7551 uint32_t offset,
7552 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007553 if (kEmitCompilerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007554 // Baker's read barriers shall be handled by the fast path
7555 // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
7556 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007557 // If heap poisoning is enabled, unpoisoning will be taken care of
7558 // by the runtime within the slow path.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007559 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007560 } else if (kPoisonHeapReferences) {
7561 __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
7562 }
7563}
7564
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007565void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
7566 Location out,
7567 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007568 DCHECK(kEmitCompilerReadBarrier);
7569
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007570 // Insert a slow path based read barrier *after* the GC root load.
7571 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00007572 // Note that GC roots are not affected by heap poisoning, so we do
7573 // not need to do anything special for this here.
7574 SlowPathCode* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01007575 new (GetScopedAllocator()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007576 AddSlowPath(slow_path);
7577
Roland Levillain0d5a2812015-11-13 10:07:31 +00007578 __ jmp(slow_path->GetEntryLabel());
7579 __ Bind(slow_path->GetExitLabel());
7580}
7581
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01007582void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00007583 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00007584 LOG(FATAL) << "Unreachable";
7585}
7586
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01007587void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00007588 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00007589 LOG(FATAL) << "Unreachable";
7590}
7591
Mark Mendellfe57faa2015-09-18 09:26:15 -04007592// Simple implementation of packed switch - generate cascaded compare/jumps.
7593void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
7594 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007595 new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
Mark Mendellfe57faa2015-09-18 09:26:15 -04007596 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell9c86b482015-09-18 13:36:07 -04007597 locations->AddTemp(Location::RequiresRegister());
7598 locations->AddTemp(Location::RequiresRegister());
Mark Mendellfe57faa2015-09-18 09:26:15 -04007599}
7600
7601void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
7602 int32_t lower_bound = switch_instr->GetStartValue();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00007603 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04007604 LocationSummary* locations = switch_instr->GetLocations();
Mark Mendell9c86b482015-09-18 13:36:07 -04007605 CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
7606 CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
7607 CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00007608 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
7609
7610 // Should we generate smaller inline compare/jumps?
7611 if (num_entries <= kPackedSwitchJumpTableThreshold) {
7612 // Figure out the correct compare values and jump conditions.
7613 // Handle the first compare/branch as a special case because it might
7614 // jump to the default case.
7615 DCHECK_GT(num_entries, 2u);
7616 Condition first_condition;
7617 uint32_t index;
7618 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
7619 if (lower_bound != 0) {
7620 first_condition = kLess;
7621 __ cmpl(value_reg_in, Immediate(lower_bound));
7622 __ j(first_condition, codegen_->GetLabelOf(default_block));
7623 __ j(kEqual, codegen_->GetLabelOf(successors[0]));
7624
7625 index = 1;
7626 } else {
7627 // Handle all the compare/jumps below.
7628 first_condition = kBelow;
7629 index = 0;
7630 }
7631
7632 // Handle the rest of the compare/jumps.
7633 for (; index + 1 < num_entries; index += 2) {
7634 int32_t compare_to_value = lower_bound + index + 1;
7635 __ cmpl(value_reg_in, Immediate(compare_to_value));
7636 // Jump to successors[index] if value < case_value[index].
7637 __ j(first_condition, codegen_->GetLabelOf(successors[index]));
7638 // Jump to successors[index + 1] if value == case_value[index + 1].
7639 __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
7640 }
7641
7642 if (index != num_entries) {
7643 // There are an odd number of entries. Handle the last one.
7644 DCHECK_EQ(index + 1, num_entries);
Nicolas Geoffray6ce01732015-12-30 14:10:13 +00007645 __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
Vladimir Markof3e0ee22015-12-17 15:23:13 +00007646 __ j(kEqual, codegen_->GetLabelOf(successors[index]));
7647 }
7648
7649 // And the default for any other value.
7650 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
7651 __ jmp(codegen_->GetLabelOf(default_block));
7652 }
7653 return;
7654 }
Mark Mendell9c86b482015-09-18 13:36:07 -04007655
7656 // Remove the bias, if needed.
7657 Register value_reg_out = value_reg_in.AsRegister();
7658 if (lower_bound != 0) {
7659 __ leal(temp_reg, Address(value_reg_in, -lower_bound));
7660 value_reg_out = temp_reg.AsRegister();
7661 }
7662 CpuRegister value_reg(value_reg_out);
7663
7664 // Is the value in range?
Mark Mendell9c86b482015-09-18 13:36:07 -04007665 __ cmpl(value_reg, Immediate(num_entries - 1));
7666 __ j(kAbove, codegen_->GetLabelOf(default_block));
Mark Mendellfe57faa2015-09-18 09:26:15 -04007667
Mark Mendell9c86b482015-09-18 13:36:07 -04007668 // We are in the range of the table.
7669 // Load the address of the jump table in the constant area.
7670 __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));
Mark Mendellfe57faa2015-09-18 09:26:15 -04007671
Mark Mendell9c86b482015-09-18 13:36:07 -04007672 // Load the (signed) offset from the jump table.
7673 __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));
7674
7675 // Add the offset to the address of the table base.
7676 __ addq(temp_reg, base_reg);
7677
7678 // And jump.
7679 __ jmp(temp_reg);
Mark Mendellfe57faa2015-09-18 09:26:15 -04007680}
7681
xueliang.zhonge0eb4832017-10-30 13:43:14 +00007682void LocationsBuilderX86_64::VisitIntermediateAddress(HIntermediateAddress* instruction
7683 ATTRIBUTE_UNUSED) {
7684 LOG(FATAL) << "Unreachable";
7685}
7686
7687void InstructionCodeGeneratorX86_64::VisitIntermediateAddress(HIntermediateAddress* instruction
7688 ATTRIBUTE_UNUSED) {
7689 LOG(FATAL) << "Unreachable";
7690}
7691
Aart Bikc5d47542016-01-27 17:00:35 -08007692void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
7693 if (value == 0) {
7694 __ xorl(dest, dest);
7695 } else {
7696 __ movl(dest, Immediate(value));
7697 }
7698}
7699
Mark Mendell92e83bf2015-05-07 11:25:03 -04007700void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
7701 if (value == 0) {
Aart Bikc5d47542016-01-27 17:00:35 -08007702 // Clears upper bits too.
Mark Mendell92e83bf2015-05-07 11:25:03 -04007703 __ xorl(dest, dest);
Vladimir Markoed009782016-02-22 16:54:39 +00007704 } else if (IsUint<32>(value)) {
7705 // We can use a 32 bit move, as it will zero-extend and is shorter.
Mark Mendell92e83bf2015-05-07 11:25:03 -04007706 __ movl(dest, Immediate(static_cast<int32_t>(value)));
7707 } else {
7708 __ movq(dest, Immediate(value));
7709 }
7710}
7711
Mark Mendell7c0b44f2016-02-01 10:08:35 -05007712void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
7713 if (value == 0) {
7714 __ xorps(dest, dest);
7715 } else {
7716 __ movss(dest, LiteralInt32Address(value));
7717 }
7718}
7719
7720void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
7721 if (value == 0) {
7722 __ xorpd(dest, dest);
7723 } else {
7724 __ movsd(dest, LiteralInt64Address(value));
7725 }
7726}
7727
7728void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
7729 Load32BitValue(dest, bit_cast<int32_t, float>(value));
7730}
7731
7732void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
7733 Load64BitValue(dest, bit_cast<int64_t, double>(value));
7734}
7735
Aart Bika19616e2016-02-01 18:57:58 -08007736void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
7737 if (value == 0) {
7738 __ testl(dest, dest);
7739 } else {
7740 __ cmpl(dest, Immediate(value));
7741 }
7742}
7743
7744void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
7745 if (IsInt<32>(value)) {
7746 if (value == 0) {
7747 __ testq(dest, dest);
7748 } else {
7749 __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
7750 }
7751 } else {
7752 // Value won't fit in an int.
7753 __ cmpq(dest, LiteralInt64Address(value));
7754 }
7755}
7756
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007757void CodeGeneratorX86_64::GenerateIntCompare(Location lhs, Location rhs) {
7758 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
jessicahandojo4877b792016-09-08 19:49:13 -07007759 GenerateIntCompare(lhs_reg, rhs);
7760}
7761
7762void CodeGeneratorX86_64::GenerateIntCompare(CpuRegister lhs, Location rhs) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007763 if (rhs.IsConstant()) {
7764 int32_t value = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
jessicahandojo4877b792016-09-08 19:49:13 -07007765 Compare32BitValue(lhs, value);
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007766 } else if (rhs.IsStackSlot()) {
jessicahandojo4877b792016-09-08 19:49:13 -07007767 __ cmpl(lhs, Address(CpuRegister(RSP), rhs.GetStackIndex()));
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007768 } else {
jessicahandojo4877b792016-09-08 19:49:13 -07007769 __ cmpl(lhs, rhs.AsRegister<CpuRegister>());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007770 }
7771}
7772
7773void CodeGeneratorX86_64::GenerateLongCompare(Location lhs, Location rhs) {
7774 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
7775 if (rhs.IsConstant()) {
7776 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
7777 Compare64BitValue(lhs_reg, value);
7778 } else if (rhs.IsDoubleStackSlot()) {
7779 __ cmpq(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
7780 } else {
7781 __ cmpq(lhs_reg, rhs.AsRegister<CpuRegister>());
7782 }
7783}
7784
7785Address CodeGeneratorX86_64::ArrayAddress(CpuRegister obj,
7786 Location index,
7787 ScaleFactor scale,
7788 uint32_t data_offset) {
7789 return index.IsConstant() ?
7790 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << scale) + data_offset) :
7791 Address(obj, index.AsRegister<CpuRegister>(), scale, data_offset);
7792}
7793
Mark Mendellcfa410b2015-05-25 16:02:44 -04007794void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
7795 DCHECK(dest.IsDoubleStackSlot());
7796 if (IsInt<32>(value)) {
7797 // Can move directly as an int32 constant.
7798 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
7799 Immediate(static_cast<int32_t>(value)));
7800 } else {
7801 Load64BitValue(CpuRegister(TMP), value);
7802 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
7803 }
7804}
7805
Mark Mendell9c86b482015-09-18 13:36:07 -04007806/**
7807 * Class to handle late fixup of offsets into constant area.
7808 */
7809class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
7810 public:
7811 RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
7812 : codegen_(&codegen), offset_into_constant_area_(offset) {}
7813
7814 protected:
7815 void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }
7816
7817 CodeGeneratorX86_64* codegen_;
7818
7819 private:
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01007820 void Process(const MemoryRegion& region, int pos) override {
Mark Mendell9c86b482015-09-18 13:36:07 -04007821 // Patch the correct offset for the instruction. We use the address of the
7822 // 'next' instruction, which is 'pos' (patch the 4 bytes before).
7823 int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
7824 int32_t relative_position = constant_offset - pos;
7825
7826 // Patch in the right value.
7827 region.StoreUnaligned<int32_t>(pos - 4, relative_position);
7828 }
7829
7830 // Location in constant area that the fixup refers to.
7831 size_t offset_into_constant_area_;
7832};
7833
7834/**
7835 t * Class to handle late fixup of offsets to a jump table that will be created in the
7836 * constant area.
7837 */
7838class JumpTableRIPFixup : public RIPFixup {
7839 public:
7840 JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
7841 : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}
7842
7843 void CreateJumpTable() {
7844 X86_64Assembler* assembler = codegen_->GetAssembler();
7845
7846 // Ensure that the reference to the jump table has the correct offset.
7847 const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
7848 SetOffset(offset_in_constant_table);
7849
7850 // Compute the offset from the start of the function to this jump table.
7851 const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;
7852
7853 // Populate the jump table with the correct values for the jump table.
7854 int32_t num_entries = switch_instr_->GetNumEntries();
7855 HBasicBlock* block = switch_instr_->GetBlock();
7856 const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
7857 // The value that we want is the target offset - the position of the table.
7858 for (int32_t i = 0; i < num_entries; i++) {
7859 HBasicBlock* b = successors[i];
7860 Label* l = codegen_->GetLabelOf(b);
7861 DCHECK(l->IsBound());
7862 int32_t offset_to_block = l->Position() - current_table_offset;
7863 assembler->AppendInt32(offset_to_block);
7864 }
7865 }
7866
7867 private:
7868 const HPackedSwitch* switch_instr_;
7869};
7870
Mark Mendellf55c3e02015-03-26 21:07:46 -04007871void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
7872 // Generate the constant area if needed.
Mark Mendell39dcf552015-04-09 20:42:42 -04007873 X86_64Assembler* assembler = GetAssembler();
Mark Mendell9c86b482015-09-18 13:36:07 -04007874 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
7875 // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8 byte values.
Mark Mendell39dcf552015-04-09 20:42:42 -04007876 assembler->Align(4, 0);
7877 constant_area_start_ = assembler->CodeSize();
Mark Mendell9c86b482015-09-18 13:36:07 -04007878
7879 // Populate any jump tables.
Vladimir Marko7d157fc2017-05-10 16:29:23 +01007880 for (JumpTableRIPFixup* jump_table : fixups_to_jump_tables_) {
Mark Mendell9c86b482015-09-18 13:36:07 -04007881 jump_table->CreateJumpTable();
7882 }
7883
7884 // And now add the constant area to the generated code.
Mark Mendell39dcf552015-04-09 20:42:42 -04007885 assembler->AddConstantArea();
Mark Mendellf55c3e02015-03-26 21:07:46 -04007886 }
7887
7888 // And finish up.
7889 CodeGenerator::Finalize(allocator);
7890}
7891
Mark Mendellf55c3e02015-03-26 21:07:46 -04007892Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007893 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddDouble(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007894 return Address::RIP(fixup);
7895}
7896
7897Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007898 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddFloat(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007899 return Address::RIP(fixup);
7900}
7901
7902Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007903 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddInt32(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007904 return Address::RIP(fixup);
7905}
7906
7907Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007908 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddInt64(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007909 return Address::RIP(fixup);
7910}
7911
Andreas Gampe85b62f22015-09-09 13:15:38 -07007912// TODO: trg as memory.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007913void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, DataType::Type type) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07007914 if (!trg.IsValid()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007915 DCHECK_EQ(type, DataType::Type::kVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07007916 return;
7917 }
7918
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007919 DCHECK_NE(type, DataType::Type::kVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07007920
7921 Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
7922 if (trg.Equals(return_loc)) {
7923 return;
7924 }
7925
7926 // Let the parallel move resolver take care of all of this.
Vladimir Markoca6fff82017-10-03 14:49:14 +01007927 HParallelMove parallel_move(GetGraph()->GetAllocator());
Andreas Gampe85b62f22015-09-09 13:15:38 -07007928 parallel_move.AddMove(return_loc, trg, type, nullptr);
7929 GetMoveResolver()->EmitNativeCode(&parallel_move);
7930}
7931
Mark Mendell9c86b482015-09-18 13:36:07 -04007932Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
7933 // Create a fixup to be used to create and address the jump table.
7934 JumpTableRIPFixup* table_fixup =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007935 new (GetGraph()->GetAllocator()) JumpTableRIPFixup(*this, switch_instr);
Mark Mendell9c86b482015-09-18 13:36:07 -04007936
7937 // We have to populate the jump tables.
7938 fixups_to_jump_tables_.push_back(table_fixup);
7939 return Address::RIP(table_fixup);
7940}
7941
Mark Mendellea5af682015-10-22 17:35:49 -04007942void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
7943 const Address& addr_high,
7944 int64_t v,
7945 HInstruction* instruction) {
7946 if (IsInt<32>(v)) {
7947 int32_t v_32 = v;
7948 __ movq(addr_low, Immediate(v_32));
7949 MaybeRecordImplicitNullCheck(instruction);
7950 } else {
7951 // Didn't fit in a register. Do it in pieces.
7952 int32_t low_v = Low32Bits(v);
7953 int32_t high_v = High32Bits(v);
7954 __ movl(addr_low, Immediate(low_v));
7955 MaybeRecordImplicitNullCheck(instruction);
7956 __ movl(addr_high, Immediate(high_v));
7957 }
7958}
7959
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007960void CodeGeneratorX86_64::PatchJitRootUse(uint8_t* code,
7961 const uint8_t* roots_data,
7962 const PatchInfo<Label>& info,
7963 uint64_t index_in_table) const {
7964 uint32_t code_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
7965 uintptr_t address =
7966 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
Andreas Gampec55bb392018-09-21 00:02:02 +00007967 using unaligned_uint32_t __attribute__((__aligned__(1))) = uint32_t;
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007968 reinterpret_cast<unaligned_uint32_t*>(code + code_offset)[0] =
7969 dchecked_integral_cast<uint32_t>(address);
7970}
7971
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007972void CodeGeneratorX86_64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
7973 for (const PatchInfo<Label>& info : jit_string_patches_) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00007974 StringReference string_reference(info.target_dex_file, dex::StringIndex(info.offset_or_index));
Vladimir Marko174b2e22017-10-12 13:34:49 +01007975 uint64_t index_in_table = GetJitStringRootIndex(string_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01007976 PatchJitRootUse(code, roots_data, info, index_in_table);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007977 }
7978
7979 for (const PatchInfo<Label>& info : jit_class_patches_) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00007980 TypeReference type_reference(info.target_dex_file, dex::TypeIndex(info.offset_or_index));
Vladimir Marko174b2e22017-10-12 13:34:49 +01007981 uint64_t index_in_table = GetJitClassRootIndex(type_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01007982 PatchJitRootUse(code, roots_data, info, index_in_table);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007983 }
7984}
7985
Shalini Salomi Bodapatib45a4352019-07-10 16:09:41 +05307986bool LocationsBuilderX86_64::CpuHasAvxFeatureFlag() {
7987 return codegen_->GetInstructionSetFeatures().HasAVX();
7988}
7989
7990bool LocationsBuilderX86_64::CpuHasAvx2FeatureFlag() {
7991 return codegen_->GetInstructionSetFeatures().HasAVX2();
7992}
7993
7994bool InstructionCodeGeneratorX86_64::CpuHasAvxFeatureFlag() {
7995 return codegen_->GetInstructionSetFeatures().HasAVX();
7996}
7997
7998bool InstructionCodeGeneratorX86_64::CpuHasAvx2FeatureFlag() {
7999 return codegen_->GetInstructionSetFeatures().HasAVX2();
8000}
8001
Roland Levillain4d027112015-07-01 15:41:14 +01008002#undef __
8003
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01008004} // namespace x86_64
8005} // namespace art