blob: 3ae6515c1314c4bc14023eb0243594613c3c3575 [file] [log] [blame]
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86.h"
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +010018
Vladimir Marko86c87522020-05-11 16:55:55 +010019#include "arch/x86/jni_frame_x86.h"
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +000020#include "art_method-inl.h"
Vladimir Marko94ec2db2017-09-06 17:21:03 +010021#include "class_table.h"
Guillaume Sanchez0f88e872015-03-30 17:55:45 +010022#include "code_generator_utils.h"
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +010023#include "entrypoints/quick/quick_entrypoints.h"
Nicolas Geoffraycb1b00a2015-01-28 14:50:01 +000024#include "entrypoints/quick/quick_entrypoints_enum.h"
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +010025#include "gc/accounting/card_table.h"
Vladimir Markoeebb8212018-06-05 14:57:24 +010026#include "gc/space/image_space.h"
Andreas Gampe09659c22017-09-18 18:23:32 -070027#include "heap_poisoning.h"
Nicolas Geoffray8b8d93d2020-09-17 14:30:01 +010028#include "interpreter/mterp/nterp.h"
Mark Mendell09ed1a32015-03-25 08:30:06 -040029#include "intrinsics.h"
Ulya Trafimovichec696e52022-01-26 10:21:32 +000030#include "intrinsics_utils.h"
Mark Mendell09ed1a32015-03-25 08:30:06 -040031#include "intrinsics_x86.h"
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +000032#include "jit/profiling_info.h"
Vladimir Markod8dbc8d2017-09-20 13:37:47 +010033#include "linker/linker_patch.h"
Andreas Gampe8cf9cb32017-07-19 09:28:38 -070034#include "lock_word.h"
Ian Rogers7e70b002014-10-08 11:47:24 -070035#include "mirror/array-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070036#include "mirror/class-inl.h"
Andra Danciu52d2c0c2020-09-15 14:27:21 +000037#include "mirror/var_handle.h"
Santiago Aboy Solanesd4229602023-01-03 16:20:50 +000038#include "optimizing/nodes.h"
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +000039#include "scoped_thread_state_change-inl.h"
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +010040#include "thread.h"
Mythri Alle98aefe02023-02-27 18:50:44 +000041#include "trace.h"
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +000042#include "utils/assembler.h"
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010043#include "utils/stack_checks.h"
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +000044#include "utils/x86/assembler_x86.h"
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +010045#include "utils/x86/managed_register_x86.h"
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +000046
VladimĂ­r Marko434d9682022-11-04 14:04:17 +000047namespace art HIDDEN {
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +010048
Roland Levillain0d5a2812015-11-13 10:07:31 +000049template<class MirrorType>
50class GcRoot;
51
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +000052namespace x86 {
53
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010054static constexpr int kCurrentMethodStackOffset = 0;
Nicolas Geoffray76b1e172015-05-27 17:18:33 +010055static constexpr Register kMethodRegisterArgument = EAX;
Mark Mendell5f874182015-03-04 15:42:45 -050056static constexpr Register kCoreCalleeSaves[] = { EBP, ESI, EDI };
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +010057
Mark Mendell24f2dfa2015-01-14 19:51:45 -050058static constexpr int kC2ConditionMask = 0x400;
59
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +000060static constexpr int kFakeReturnRegister = Register(8);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +000061
Aart Bik1f8d51b2018-02-15 10:42:37 -080062static constexpr int64_t kDoubleNaN = INT64_C(0x7FF8000000000000);
63static constexpr int32_t kFloatNaN = INT32_C(0x7FC00000);
64
Vladimir Marko3232dbb2018-07-25 15:42:46 +010065static RegisterSet OneRegInReferenceOutSaveEverythingCallerSaves() {
66 InvokeRuntimeCallingConvention calling_convention;
67 RegisterSet caller_saves = RegisterSet::Empty();
68 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
69 // TODO: Add GetReturnLocation() to the calling convention so that we can DCHECK()
70 // that the the kPrimNot result register is the same as the first argument register.
71 return caller_saves;
72}
73
Roland Levillain7cbd27f2016-08-11 23:53:33 +010074// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
75#define __ down_cast<X86Assembler*>(codegen->GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -070076#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86PointerSize, x).Int32Value()
Nicolas Geoffraye5038322014-07-04 09:41:32 +010077
Andreas Gampe85b62f22015-09-09 13:15:38 -070078class NullCheckSlowPathX86 : public SlowPathCode {
Nicolas Geoffraye5038322014-07-04 09:41:32 +010079 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000080 explicit NullCheckSlowPathX86(HNullCheck* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye5038322014-07-04 09:41:32 +010081
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010082 void EmitNativeCode(CodeGenerator* codegen) override {
Alexandre Rames8158f282015-08-07 10:26:17 +010083 CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
Nicolas Geoffraye5038322014-07-04 09:41:32 +010084 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +000085 if (instruction_->CanThrowIntoCatchBlock()) {
86 // Live registers will be restored in the catch block if caught.
87 SaveLiveRegisters(codegen, instruction_->GetLocations());
88 }
Serban Constantinescuba45db02016-07-12 22:53:02 +010089 x86_codegen->InvokeRuntime(kQuickThrowNullPointer,
Alexandre Rames8158f282015-08-07 10:26:17 +010090 instruction_,
91 instruction_->GetDexPc(),
92 this);
Roland Levillain888d0672015-11-23 18:53:50 +000093 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
Nicolas Geoffraye5038322014-07-04 09:41:32 +010094 }
95
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010096 bool IsFatal() const override { return true; }
Alexandre Rames8158f282015-08-07 10:26:17 +010097
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010098 const char* GetDescription() const override { return "NullCheckSlowPathX86"; }
Alexandre Rames9931f312015-06-19 14:47:01 +010099
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100100 private:
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100101 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86);
102};
103
Andreas Gampe85b62f22015-09-09 13:15:38 -0700104class DivZeroCheckSlowPathX86 : public SlowPathCode {
Calin Juravled0d48522014-11-04 16:40:20 +0000105 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000106 explicit DivZeroCheckSlowPathX86(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}
Calin Juravled0d48522014-11-04 16:40:20 +0000107
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100108 void EmitNativeCode(CodeGenerator* codegen) override {
Alexandre Rames8158f282015-08-07 10:26:17 +0100109 CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
Calin Juravled0d48522014-11-04 16:40:20 +0000110 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +0100111 x86_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000112 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
Calin Juravled0d48522014-11-04 16:40:20 +0000113 }
114
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100115 bool IsFatal() const override { return true; }
Alexandre Rames8158f282015-08-07 10:26:17 +0100116
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100117 const char* GetDescription() const override { return "DivZeroCheckSlowPathX86"; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100118
Calin Juravled0d48522014-11-04 16:40:20 +0000119 private:
Calin Juravled0d48522014-11-04 16:40:20 +0000120 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86);
121};
122
Andreas Gampe85b62f22015-09-09 13:15:38 -0700123class DivRemMinusOneSlowPathX86 : public SlowPathCode {
Calin Juravled0d48522014-11-04 16:40:20 +0000124 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000125 DivRemMinusOneSlowPathX86(HInstruction* instruction, Register reg, bool is_div)
126 : SlowPathCode(instruction), reg_(reg), is_div_(is_div) {}
Calin Juravled0d48522014-11-04 16:40:20 +0000127
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100128 void EmitNativeCode(CodeGenerator* codegen) override {
Calin Juravled0d48522014-11-04 16:40:20 +0000129 __ Bind(GetEntryLabel());
Calin Juravlebacfec32014-11-14 15:54:36 +0000130 if (is_div_) {
131 __ negl(reg_);
132 } else {
133 __ movl(reg_, Immediate(0));
134 }
Calin Juravled0d48522014-11-04 16:40:20 +0000135 __ jmp(GetExitLabel());
136 }
137
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100138 const char* GetDescription() const override { return "DivRemMinusOneSlowPathX86"; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100139
Calin Juravled0d48522014-11-04 16:40:20 +0000140 private:
141 Register reg_;
Calin Juravlebacfec32014-11-14 15:54:36 +0000142 bool is_div_;
143 DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86);
Calin Juravled0d48522014-11-04 16:40:20 +0000144};
145
Andreas Gampe85b62f22015-09-09 13:15:38 -0700146class BoundsCheckSlowPathX86 : public SlowPathCode {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100147 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000148 explicit BoundsCheckSlowPathX86(HBoundsCheck* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100149
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100150 void EmitNativeCode(CodeGenerator* codegen) override {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100151 LocationSummary* locations = instruction_->GetLocations();
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +0100152 CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100153 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +0000154 if (instruction_->CanThrowIntoCatchBlock()) {
155 // Live registers will be restored in the catch block if caught.
Vladimir Markod77cf742022-04-12 10:46:28 +0100156 SaveLiveRegisters(codegen, locations);
David Brazdil77a48ae2015-09-15 12:34:04 +0000157 }
Mark Mendellee8d9712016-07-12 11:13:15 -0400158
Vladimir Markod77cf742022-04-12 10:46:28 +0100159 Location index_loc = locations->InAt(0);
Mark Mendellee8d9712016-07-12 11:13:15 -0400160 Location length_loc = locations->InAt(1);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100161 InvokeRuntimeCallingConvention calling_convention;
Vladimir Markod77cf742022-04-12 10:46:28 +0100162 Location index_arg = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
163 Location length_arg = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
164
165 // Are we using an array length from memory?
166 if (!length_loc.IsValid()) {
167 DCHECK(instruction_->InputAt(1)->IsArrayLength());
Vladimir Markocde64972023-04-25 16:40:06 +0000168 HArrayLength* array_length = instruction_->InputAt(1)->AsArrayLength();
Vladimir Markod77cf742022-04-12 10:46:28 +0100169 DCHECK(array_length->IsEmittedAtUseSite());
170 uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length);
Mark Mendellee8d9712016-07-12 11:13:15 -0400171 Location array_loc = array_length->GetLocations()->InAt(0);
Vladimir Markod77cf742022-04-12 10:46:28 +0100172 if (!index_loc.Equals(length_arg)) {
173 // The index is not clobbered by loading the length directly to `length_arg`.
174 __ movl(length_arg.AsRegister<Register>(),
175 Address(array_loc.AsRegister<Register>(), len_offset));
176 x86_codegen->Move32(index_arg, index_loc);
177 } else if (!array_loc.Equals(index_arg)) {
178 // The array reference is not clobbered by the index move.
179 x86_codegen->Move32(index_arg, index_loc);
180 __ movl(length_arg.AsRegister<Register>(),
181 Address(array_loc.AsRegister<Register>(), len_offset));
182 } else {
183 // We do not have a temporary we could use, so swap the registers using the
184 // parallel move resolver and replace the array with the length afterwards.
185 codegen->EmitParallelMoves(
186 index_loc,
187 index_arg,
188 DataType::Type::kInt32,
189 array_loc,
190 length_arg,
191 DataType::Type::kReference);
192 __ movl(length_arg.AsRegister<Register>(),
193 Address(length_arg.AsRegister<Register>(), len_offset));
Mark Mendellee8d9712016-07-12 11:13:15 -0400194 }
Vladimir Markod77cf742022-04-12 10:46:28 +0100195 if (mirror::kUseStringCompression && array_length->IsStringLength()) {
196 __ shrl(length_arg.AsRegister<Register>(), Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -0700197 }
Vladimir Markod77cf742022-04-12 10:46:28 +0100198 } else {
199 // We're moving two locations to locations that could overlap,
200 // so we need a parallel move resolver.
201 codegen->EmitParallelMoves(
202 index_loc,
203 index_arg,
204 DataType::Type::kInt32,
205 length_loc,
206 length_arg,
207 DataType::Type::kInt32);
Mark Mendellee8d9712016-07-12 11:13:15 -0400208 }
Vladimir Markod77cf742022-04-12 10:46:28 +0100209
Vladimir Markocde64972023-04-25 16:40:06 +0000210 QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
Serban Constantinescuba45db02016-07-12 22:53:02 +0100211 ? kQuickThrowStringBounds
212 : kQuickThrowArrayBounds;
213 x86_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
Vladimir Marko87f3fcb2016-04-28 15:52:11 +0100214 CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
Roland Levillain888d0672015-11-23 18:53:50 +0000215 CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100216 }
217
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100218 bool IsFatal() const override { return true; }
Alexandre Rames8158f282015-08-07 10:26:17 +0100219
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100220 const char* GetDescription() const override { return "BoundsCheckSlowPathX86"; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100221
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100222 private:
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100223 DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86);
224};
225
Andreas Gampe85b62f22015-09-09 13:15:38 -0700226class SuspendCheckSlowPathX86 : public SlowPathCode {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000227 public:
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000228 SuspendCheckSlowPathX86(HSuspendCheck* instruction, HBasicBlock* successor)
David Srbecky9cd6d372016-02-09 15:24:47 +0000229 : SlowPathCode(instruction), successor_(successor) {}
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000230
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100231 void EmitNativeCode(CodeGenerator* codegen) override {
Aart Bikb13c65b2017-03-21 20:14:07 -0700232 LocationSummary* locations = instruction_->GetLocations();
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +0100233 CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000234 __ Bind(GetEntryLabel());
Aart Bik24b905f2017-04-06 09:59:06 -0700235 SaveLiveRegisters(codegen, locations); // Only saves full width XMM for SIMD.
Serban Constantinescuba45db02016-07-12 22:53:02 +0100236 x86_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000237 CheckEntrypointTypes<kQuickTestSuspend, void, void>();
Aart Bik24b905f2017-04-06 09:59:06 -0700238 RestoreLiveRegisters(codegen, locations); // Only restores full width XMM for SIMD.
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100239 if (successor_ == nullptr) {
240 __ jmp(GetReturnLabel());
241 } else {
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +0100242 __ jmp(x86_codegen->GetLabelOf(successor_));
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100243 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000244 }
245
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100246 Label* GetReturnLabel() {
247 DCHECK(successor_ == nullptr);
248 return &return_label_;
249 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000250
Nicolas Geoffraydb216f42015-05-05 17:02:20 +0100251 HBasicBlock* GetSuccessor() const {
252 return successor_;
253 }
254
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100255 const char* GetDescription() const override { return "SuspendCheckSlowPathX86"; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100256
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000257 private:
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100258 HBasicBlock* const successor_;
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000259 Label return_label_;
260
261 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86);
262};
263
Vladimir Markoaad75c62016-10-03 08:46:48 +0000264class LoadStringSlowPathX86 : public SlowPathCode {
265 public:
266 explicit LoadStringSlowPathX86(HLoadString* instruction): SlowPathCode(instruction) {}
267
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100268 void EmitNativeCode(CodeGenerator* codegen) override {
Vladimir Markoaad75c62016-10-03 08:46:48 +0000269 LocationSummary* locations = instruction_->GetLocations();
270 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
271
272 CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
273 __ Bind(GetEntryLabel());
274 SaveLiveRegisters(codegen, locations);
275
276 InvokeRuntimeCallingConvention calling_convention;
Vladimir Markocde64972023-04-25 16:40:06 +0000277 const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000278 __ movl(calling_convention.GetRegisterAt(0), Immediate(string_index.index_));
Vladimir Markoaad75c62016-10-03 08:46:48 +0000279 x86_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
280 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
281 x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
282 RestoreLiveRegisters(codegen, locations);
283
Vladimir Markoaad75c62016-10-03 08:46:48 +0000284 __ jmp(GetExitLabel());
285 }
286
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100287 const char* GetDescription() const override { return "LoadStringSlowPathX86"; }
Vladimir Markoaad75c62016-10-03 08:46:48 +0000288
289 private:
290 DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86);
291};
292
Andreas Gampe85b62f22015-09-09 13:15:38 -0700293class LoadClassSlowPathX86 : public SlowPathCode {
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000294 public:
Vladimir Markoa9f303c2018-07-20 16:43:56 +0100295 LoadClassSlowPathX86(HLoadClass* cls, HInstruction* at)
296 : SlowPathCode(at), cls_(cls) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000297 DCHECK(at->IsLoadClass() || at->IsClinitCheck());
Vladimir Markoa9f303c2018-07-20 16:43:56 +0100298 DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000299 }
300
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100301 void EmitNativeCode(CodeGenerator* codegen) override {
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000302 LocationSummary* locations = instruction_->GetLocations();
Vladimir Markoa9f303c2018-07-20 16:43:56 +0100303 Location out = locations->Out();
304 const uint32_t dex_pc = instruction_->GetDexPc();
305 bool must_resolve_type = instruction_->IsLoadClass() && cls_->MustResolveTypeOnSlowPath();
306 bool must_do_clinit = instruction_->IsClinitCheck() || cls_->MustGenerateClinitCheck();
307
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000308 CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
309 __ Bind(GetEntryLabel());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000310 SaveLiveRegisters(codegen, locations);
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000311
312 InvokeRuntimeCallingConvention calling_convention;
Vladimir Markoa9f303c2018-07-20 16:43:56 +0100313 if (must_resolve_type) {
Santiago Aboy Solanesa0232ad2021-11-08 17:00:06 +0000314 DCHECK(IsSameDexFile(cls_->GetDexFile(), x86_codegen->GetGraph()->GetDexFile()) ||
Santiago Aboy Solanes69a87e32022-03-08 16:43:54 +0000315 x86_codegen->GetCompilerOptions().WithinOatFile(&cls_->GetDexFile()) ||
316 ContainsElement(Runtime::Current()->GetClassLinker()->GetBootClassPath(),
317 &cls_->GetDexFile()));
Vladimir Markoa9f303c2018-07-20 16:43:56 +0100318 dex::TypeIndex type_index = cls_->GetTypeIndex();
319 __ movl(calling_convention.GetRegisterAt(0), Immediate(type_index.index_));
Vladimir Marko8f63f102020-09-28 12:10:28 +0100320 if (cls_->NeedsAccessCheck()) {
321 CheckEntrypointTypes<kQuickResolveTypeAndVerifyAccess, void*, uint32_t>();
322 x86_codegen->InvokeRuntime(kQuickResolveTypeAndVerifyAccess, instruction_, dex_pc, this);
323 } else {
324 CheckEntrypointTypes<kQuickResolveType, void*, uint32_t>();
325 x86_codegen->InvokeRuntime(kQuickResolveType, instruction_, dex_pc, this);
326 }
Vladimir Markoa9f303c2018-07-20 16:43:56 +0100327 // If we also must_do_clinit, the resolved type is now in the correct register.
328 } else {
329 DCHECK(must_do_clinit);
330 Location source = instruction_->IsLoadClass() ? out : locations->InAt(0);
331 x86_codegen->Move32(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), source);
332 }
333 if (must_do_clinit) {
334 x86_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, dex_pc, this);
335 CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, mirror::Class*>();
Roland Levillain888d0672015-11-23 18:53:50 +0000336 }
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000337
338 // Move the class to the desired location.
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000339 if (out.IsValid()) {
340 DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
341 x86_codegen->Move32(out, Location::RegisterLocation(EAX));
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000342 }
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000343 RestoreLiveRegisters(codegen, locations);
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000344 __ jmp(GetExitLabel());
345 }
346
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100347 const char* GetDescription() const override { return "LoadClassSlowPathX86"; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100348
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000349 private:
350 // The class this slow path will load.
351 HLoadClass* const cls_;
352
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000353 DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86);
354};
355
Andreas Gampe85b62f22015-09-09 13:15:38 -0700356class TypeCheckSlowPathX86 : public SlowPathCode {
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000357 public:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000358 TypeCheckSlowPathX86(HInstruction* instruction, bool is_fatal)
David Srbecky9cd6d372016-02-09 15:24:47 +0000359 : SlowPathCode(instruction), is_fatal_(is_fatal) {}
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000360
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100361 void EmitNativeCode(CodeGenerator* codegen) override {
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000362 LocationSummary* locations = instruction_->GetLocations();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +0000363 DCHECK(instruction_->IsCheckCast()
364 || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000365
366 CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
367 __ Bind(GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000368
Vladimir Markoe619f6c2017-12-12 16:00:01 +0000369 if (kPoisonHeapReferences &&
370 instruction_->IsCheckCast() &&
Vladimir Markocde64972023-04-25 16:40:06 +0000371 instruction_->AsCheckCast()->GetTypeCheckKind() == TypeCheckKind::kInterfaceCheck) {
Vladimir Markoe619f6c2017-12-12 16:00:01 +0000372 // First, unpoison the `cls` reference that was poisoned for direct memory comparison.
373 __ UnpoisonHeapReference(locations->InAt(1).AsRegister<Register>());
374 }
375
Vladimir Marko87584542017-12-12 17:47:52 +0000376 if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000377 SaveLiveRegisters(codegen, locations);
378 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000379
380 // We're moving two locations to locations that could overlap, so we need a parallel
381 // move resolver.
382 InvokeRuntimeCallingConvention calling_convention;
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800383 x86_codegen->EmitParallelMoves(locations->InAt(0),
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800384 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100385 DataType::Type::kReference,
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800386 locations->InAt(1),
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800387 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100388 DataType::Type::kReference);
Nicolas Geoffray57a88d42014-11-10 15:09:21 +0000389 if (instruction_->IsInstanceOf()) {
Serban Constantinescuba45db02016-07-12 22:53:02 +0100390 x86_codegen->InvokeRuntime(kQuickInstanceofNonTrivial,
Alexandre Rames8158f282015-08-07 10:26:17 +0100391 instruction_,
392 instruction_->GetDexPc(),
393 this);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800394 CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +0000395 } else {
396 DCHECK(instruction_->IsCheckCast());
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800397 x86_codegen->InvokeRuntime(kQuickCheckInstanceOf,
398 instruction_,
399 instruction_->GetDexPc(),
400 this);
401 CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +0000402 }
403
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000404 if (!is_fatal_) {
405 if (instruction_->IsInstanceOf()) {
406 x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
407 }
408 RestoreLiveRegisters(codegen, locations);
Nicolas Geoffray75374372015-09-17 17:12:19 +0000409
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000410 __ jmp(GetExitLabel());
411 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000412 }
413
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100414 const char* GetDescription() const override { return "TypeCheckSlowPathX86"; }
415 bool IsFatal() const override { return is_fatal_; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100416
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000417 private:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000418 const bool is_fatal_;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000419
420 DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86);
421};
422
Andreas Gampe85b62f22015-09-09 13:15:38 -0700423class DeoptimizationSlowPathX86 : public SlowPathCode {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700424 public:
Aart Bik42249c32016-01-07 15:33:50 -0800425 explicit DeoptimizationSlowPathX86(HDeoptimize* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000426 : SlowPathCode(instruction) {}
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700427
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100428 void EmitNativeCode(CodeGenerator* codegen) override {
Alexandre Rames8158f282015-08-07 10:26:17 +0100429 CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700430 __ Bind(GetEntryLabel());
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100431 LocationSummary* locations = instruction_->GetLocations();
432 SaveLiveRegisters(codegen, locations);
433 InvokeRuntimeCallingConvention calling_convention;
434 x86_codegen->Load32BitValue(
435 calling_convention.GetRegisterAt(0),
Vladimir Markocde64972023-04-25 16:40:06 +0000436 static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
Serban Constantinescuba45db02016-07-12 22:53:02 +0100437 x86_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100438 CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700439 }
440
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100441 const char* GetDescription() const override { return "DeoptimizationSlowPathX86"; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100442
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700443 private:
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700444 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86);
445};
446
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100447class ArraySetSlowPathX86 : public SlowPathCode {
448 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000449 explicit ArraySetSlowPathX86(HInstruction* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100450
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100451 void EmitNativeCode(CodeGenerator* codegen) override {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100452 LocationSummary* locations = instruction_->GetLocations();
453 __ Bind(GetEntryLabel());
454 SaveLiveRegisters(codegen, locations);
455
456 InvokeRuntimeCallingConvention calling_convention;
Vladimir Markoca6fff82017-10-03 14:49:14 +0100457 HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100458 parallel_move.AddMove(
459 locations->InAt(0),
460 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100461 DataType::Type::kReference,
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100462 nullptr);
463 parallel_move.AddMove(
464 locations->InAt(1),
465 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100466 DataType::Type::kInt32,
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100467 nullptr);
468 parallel_move.AddMove(
469 locations->InAt(2),
470 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100471 DataType::Type::kReference,
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100472 nullptr);
473 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
474
475 CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
Serban Constantinescuba45db02016-07-12 22:53:02 +0100476 x86_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000477 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100478 RestoreLiveRegisters(codegen, locations);
479 __ jmp(GetExitLabel());
480 }
481
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100482 const char* GetDescription() const override { return "ArraySetSlowPathX86"; }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100483
484 private:
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100485 DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86);
486};
487
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100488// Slow path marking an object reference `ref` during a read
489// barrier. The field `obj.field` in the object `obj` holding this
490// reference does not get updated by this slow path after marking (see
491// ReadBarrierMarkAndUpdateFieldSlowPathX86 below for that).
492//
493// This means that after the execution of this slow path, `ref` will
494// always be up-to-date, but `obj.field` may not; i.e., after the
495// flip, `ref` will be a to-space reference, but `obj.field` will
496// probably still be a from-space reference (unless it gets updated by
497// another thread, or if another thread installed another object
498// reference (different from `ref`) in `obj.field`).
Roland Levillain7c1559a2015-12-15 10:55:36 +0000499class ReadBarrierMarkSlowPathX86 : public SlowPathCode {
500 public:
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100501 ReadBarrierMarkSlowPathX86(HInstruction* instruction,
502 Location ref,
503 bool unpoison_ref_before_marking)
504 : SlowPathCode(instruction),
505 ref_(ref),
506 unpoison_ref_before_marking_(unpoison_ref_before_marking) {
Lokesh Gidraca5ed9f2022-04-20 01:39:28 +0000507 DCHECK(gUseReadBarrier);
Roland Levillain7c1559a2015-12-15 10:55:36 +0000508 }
509
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100510 const char* GetDescription() const override { return "ReadBarrierMarkSlowPathX86"; }
Roland Levillain7c1559a2015-12-15 10:55:36 +0000511
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100512 void EmitNativeCode(CodeGenerator* codegen) override {
Roland Levillain7c1559a2015-12-15 10:55:36 +0000513 LocationSummary* locations = instruction_->GetLocations();
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100514 Register ref_reg = ref_.AsRegister<Register>();
Roland Levillain7c1559a2015-12-15 10:55:36 +0000515 DCHECK(locations->CanCall());
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100516 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
Roland Levillain7c1559a2015-12-15 10:55:36 +0000517 DCHECK(instruction_->IsInstanceFieldGet() ||
Alex Light3a73ffb2021-01-25 14:11:05 +0000518 instruction_->IsPredicatedInstanceFieldGet() ||
Roland Levillain7c1559a2015-12-15 10:55:36 +0000519 instruction_->IsStaticFieldGet() ||
520 instruction_->IsArrayGet() ||
Roland Levillain16d9f942016-08-25 17:27:56 +0100521 instruction_->IsArraySet() ||
Roland Levillain7c1559a2015-12-15 10:55:36 +0000522 instruction_->IsLoadClass() ||
523 instruction_->IsLoadString() ||
524 instruction_->IsInstanceOf() ||
Roland Levillain3d312422016-06-23 13:53:42 +0100525 instruction_->IsCheckCast() ||
Andra Danciu1ca6f322020-08-12 08:58:07 +0000526 (instruction_->IsInvoke() && instruction_->GetLocations()->Intrinsified()))
Roland Levillain7c1559a2015-12-15 10:55:36 +0000527 << "Unexpected instruction in read barrier marking slow path: "
528 << instruction_->DebugName();
529
530 __ Bind(GetEntryLabel());
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100531 if (unpoison_ref_before_marking_) {
Vladimir Marko953437b2016-08-24 08:30:46 +0000532 // Object* ref = ref_addr->AsMirrorPtr()
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100533 __ MaybeUnpoisonHeapReference(ref_reg);
Vladimir Marko953437b2016-08-24 08:30:46 +0000534 }
Roland Levillain4359e612016-07-20 11:32:19 +0100535 // No need to save live registers; it's taken care of by the
536 // entrypoint. Also, there is no need to update the stack mask,
537 // as this runtime call will not trigger a garbage collection.
Roland Levillain7c1559a2015-12-15 10:55:36 +0000538 CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100539 DCHECK_NE(ref_reg, ESP);
540 DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
Roland Levillain02b75802016-07-13 11:54:35 +0100541 // "Compact" slow path, saving two moves.
542 //
543 // Instead of using the standard runtime calling convention (input
544 // and output in EAX):
545 //
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100546 // EAX <- ref
Roland Levillain02b75802016-07-13 11:54:35 +0100547 // EAX <- ReadBarrierMark(EAX)
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100548 // ref <- EAX
Roland Levillain02b75802016-07-13 11:54:35 +0100549 //
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100550 // we just use rX (the register containing `ref`) as input and output
Roland Levillain02b75802016-07-13 11:54:35 +0100551 // of a dedicated entrypoint:
552 //
553 // rX <- ReadBarrierMarkRegX(rX)
554 //
Roland Levillain97c46462017-05-11 14:04:03 +0100555 int32_t entry_point_offset = Thread::ReadBarrierMarkEntryPointsOffset<kX86PointerSize>(ref_reg);
Roland Levillaindec8f632016-07-22 17:10:06 +0100556 // This runtime call does not require a stack map.
557 x86_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
Roland Levillain7c1559a2015-12-15 10:55:36 +0000558 __ jmp(GetExitLabel());
559 }
560
561 private:
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100562 // The location (register) of the marked object reference.
563 const Location ref_;
564 // Should the reference in `ref_` be unpoisoned prior to marking it?
565 const bool unpoison_ref_before_marking_;
Roland Levillain7c1559a2015-12-15 10:55:36 +0000566
567 DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86);
568};
569
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100570// Slow path marking an object reference `ref` during a read barrier,
571// and if needed, atomically updating the field `obj.field` in the
572// object `obj` holding this reference after marking (contrary to
573// ReadBarrierMarkSlowPathX86 above, which never tries to update
574// `obj.field`).
575//
576// This means that after the execution of this slow path, both `ref`
577// and `obj.field` will be up-to-date; i.e., after the flip, both will
578// hold the same to-space reference (unless another thread installed
579// another object reference (different from `ref`) in `obj.field`).
580class ReadBarrierMarkAndUpdateFieldSlowPathX86 : public SlowPathCode {
581 public:
582 ReadBarrierMarkAndUpdateFieldSlowPathX86(HInstruction* instruction,
583 Location ref,
584 Register obj,
585 const Address& field_addr,
586 bool unpoison_ref_before_marking,
587 Register temp)
588 : SlowPathCode(instruction),
589 ref_(ref),
590 obj_(obj),
591 field_addr_(field_addr),
592 unpoison_ref_before_marking_(unpoison_ref_before_marking),
593 temp_(temp) {
Lokesh Gidraca5ed9f2022-04-20 01:39:28 +0000594 DCHECK(gUseReadBarrier);
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100595 }
596
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100597 const char* GetDescription() const override { return "ReadBarrierMarkAndUpdateFieldSlowPathX86"; }
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100598
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100599 void EmitNativeCode(CodeGenerator* codegen) override {
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100600 LocationSummary* locations = instruction_->GetLocations();
601 Register ref_reg = ref_.AsRegister<Register>();
602 DCHECK(locations->CanCall());
603 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
Andra Danciu5e13d452020-09-08 14:35:09 +0000604 DCHECK((instruction_->IsInvoke() && instruction_->GetLocations()->Intrinsified()))
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100605 << "Unexpected instruction in read barrier marking and field updating slow path: "
606 << instruction_->DebugName();
Vladimir Markocde64972023-04-25 16:40:06 +0000607 HInvoke* invoke = instruction_->AsInvoke();
Ulya Trafimovichec696e52022-01-26 10:21:32 +0000608 DCHECK(IsUnsafeCASObject(invoke) || IsVarHandleCASFamily(invoke)) << invoke->GetIntrinsic();
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100609
610 __ Bind(GetEntryLabel());
611 if (unpoison_ref_before_marking_) {
612 // Object* ref = ref_addr->AsMirrorPtr()
613 __ MaybeUnpoisonHeapReference(ref_reg);
614 }
615
616 // Save the old (unpoisoned) reference.
617 __ movl(temp_, ref_reg);
618
619 // No need to save live registers; it's taken care of by the
620 // entrypoint. Also, there is no need to update the stack mask,
621 // as this runtime call will not trigger a garbage collection.
622 CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
623 DCHECK_NE(ref_reg, ESP);
624 DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
625 // "Compact" slow path, saving two moves.
626 //
627 // Instead of using the standard runtime calling convention (input
628 // and output in EAX):
629 //
630 // EAX <- ref
631 // EAX <- ReadBarrierMark(EAX)
632 // ref <- EAX
633 //
634 // we just use rX (the register containing `ref`) as input and output
635 // of a dedicated entrypoint:
636 //
637 // rX <- ReadBarrierMarkRegX(rX)
638 //
Roland Levillain97c46462017-05-11 14:04:03 +0100639 int32_t entry_point_offset = Thread::ReadBarrierMarkEntryPointsOffset<kX86PointerSize>(ref_reg);
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100640 // This runtime call does not require a stack map.
641 x86_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
642
643 // If the new reference is different from the old reference,
644 // update the field in the holder (`*field_addr`).
645 //
646 // Note that this field could also hold a different object, if
647 // another thread had concurrently changed it. In that case, the
648 // LOCK CMPXCHGL instruction in the compare-and-set (CAS)
649 // operation below would abort the CAS, leaving the field as-is.
650 NearLabel done;
651 __ cmpl(temp_, ref_reg);
652 __ j(kEqual, &done);
653
654 // Update the the holder's field atomically. This may fail if
655 // mutator updates before us, but it's OK. This is achieved
656 // using a strong compare-and-set (CAS) operation with relaxed
657 // memory synchronization ordering, where the expected value is
658 // the old reference and the desired value is the new reference.
659 // This operation is implemented with a 32-bit LOCK CMPXLCHG
660 // instruction, which requires the expected value (the old
661 // reference) to be in EAX. Save EAX beforehand, and move the
662 // expected value (stored in `temp_`) into EAX.
663 __ pushl(EAX);
664 __ movl(EAX, temp_);
665
666 // Convenience aliases.
667 Register base = obj_;
668 Register expected = EAX;
669 Register value = ref_reg;
670
671 bool base_equals_value = (base == value);
672 if (kPoisonHeapReferences) {
673 if (base_equals_value) {
674 // If `base` and `value` are the same register location, move
675 // `value` to a temporary register. This way, poisoning
676 // `value` won't invalidate `base`.
677 value = temp_;
678 __ movl(value, base);
679 }
680
681 // Check that the register allocator did not assign the location
682 // of `expected` (EAX) to `value` nor to `base`, so that heap
683 // poisoning (when enabled) works as intended below.
684 // - If `value` were equal to `expected`, both references would
685 // be poisoned twice, meaning they would not be poisoned at
686 // all, as heap poisoning uses address negation.
687 // - If `base` were equal to `expected`, poisoning `expected`
688 // would invalidate `base`.
689 DCHECK_NE(value, expected);
690 DCHECK_NE(base, expected);
691
692 __ PoisonHeapReference(expected);
693 __ PoisonHeapReference(value);
694 }
695
696 __ LockCmpxchgl(field_addr_, value);
697
698 // If heap poisoning is enabled, we need to unpoison the values
699 // that were poisoned earlier.
700 if (kPoisonHeapReferences) {
701 if (base_equals_value) {
702 // `value` has been moved to a temporary register, no need
703 // to unpoison it.
704 } else {
705 __ UnpoisonHeapReference(value);
706 }
707 // No need to unpoison `expected` (EAX), as it is be overwritten below.
708 }
709
710 // Restore EAX.
711 __ popl(EAX);
712
713 __ Bind(&done);
714 __ jmp(GetExitLabel());
715 }
716
717 private:
718 // The location (register) of the marked object reference.
719 const Location ref_;
720 // The register containing the object holding the marked object reference field.
721 const Register obj_;
722 // The address of the marked reference field. The base of this address must be `obj_`.
723 const Address field_addr_;
724
725 // Should the reference in `ref_` be unpoisoned prior to marking it?
726 const bool unpoison_ref_before_marking_;
727
728 const Register temp_;
729
730 DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathX86);
731};
732
Roland Levillain0d5a2812015-11-13 10:07:31 +0000733// Slow path generating a read barrier for a heap reference.
734class ReadBarrierForHeapReferenceSlowPathX86 : public SlowPathCode {
735 public:
736 ReadBarrierForHeapReferenceSlowPathX86(HInstruction* instruction,
737 Location out,
738 Location ref,
739 Location obj,
740 uint32_t offset,
741 Location index)
David Srbecky9cd6d372016-02-09 15:24:47 +0000742 : SlowPathCode(instruction),
Roland Levillain0d5a2812015-11-13 10:07:31 +0000743 out_(out),
744 ref_(ref),
745 obj_(obj),
746 offset_(offset),
747 index_(index) {
Lokesh Gidraca5ed9f2022-04-20 01:39:28 +0000748 DCHECK(gUseReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +0000749 // If `obj` is equal to `out` or `ref`, it means the initial object
750 // has been overwritten by (or after) the heap object reference load
751 // to be instrumented, e.g.:
752 //
753 // __ movl(out, Address(out, offset));
Roland Levillain7c1559a2015-12-15 10:55:36 +0000754 // codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +0000755 //
756 // In that case, we have lost the information about the original
757 // object, and the emitted read barrier cannot work properly.
758 DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
759 DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
760 }
761
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100762 void EmitNativeCode(CodeGenerator* codegen) override {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000763 CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
764 LocationSummary* locations = instruction_->GetLocations();
765 Register reg_out = out_.AsRegister<Register>();
766 DCHECK(locations->CanCall());
767 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
Roland Levillain3d312422016-06-23 13:53:42 +0100768 DCHECK(instruction_->IsInstanceFieldGet() ||
Alex Light3a73ffb2021-01-25 14:11:05 +0000769 instruction_->IsPredicatedInstanceFieldGet() ||
Roland Levillain3d312422016-06-23 13:53:42 +0100770 instruction_->IsStaticFieldGet() ||
771 instruction_->IsArrayGet() ||
772 instruction_->IsInstanceOf() ||
773 instruction_->IsCheckCast() ||
Vladimir Marko94d2c812020-11-05 10:04:45 +0000774 (instruction_->IsInvoke() && instruction_->GetLocations()->Intrinsified()))
Roland Levillain7c1559a2015-12-15 10:55:36 +0000775 << "Unexpected instruction in read barrier for heap reference slow path: "
776 << instruction_->DebugName();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000777
778 __ Bind(GetEntryLabel());
779 SaveLiveRegisters(codegen, locations);
780
781 // We may have to change the index's value, but as `index_` is a
782 // constant member (like other "inputs" of this slow path),
783 // introduce a copy of it, `index`.
784 Location index = index_;
785 if (index_.IsValid()) {
Roland Levillain3d312422016-06-23 13:53:42 +0100786 // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
Roland Levillain0d5a2812015-11-13 10:07:31 +0000787 if (instruction_->IsArrayGet()) {
788 // Compute the actual memory offset and store it in `index`.
789 Register index_reg = index_.AsRegister<Register>();
790 DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
791 if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
792 // We are about to change the value of `index_reg` (see the
793 // calls to art::x86::X86Assembler::shll and
794 // art::x86::X86Assembler::AddImmediate below), but it has
795 // not been saved by the previous call to
796 // art::SlowPathCode::SaveLiveRegisters, as it is a
797 // callee-save register --
798 // art::SlowPathCode::SaveLiveRegisters does not consider
799 // callee-save registers, as it has been designed with the
800 // assumption that callee-save registers are supposed to be
801 // handled by the called function. So, as a callee-save
802 // register, `index_reg` _would_ eventually be saved onto
803 // the stack, but it would be too late: we would have
804 // changed its value earlier. Therefore, we manually save
805 // it here into another freely available register,
806 // `free_reg`, chosen of course among the caller-save
807 // registers (as a callee-save `free_reg` register would
808 // exhibit the same problem).
809 //
810 // Note we could have requested a temporary register from
811 // the register allocator instead; but we prefer not to, as
812 // this is a slow path, and we know we can find a
813 // caller-save register that is available.
814 Register free_reg = FindAvailableCallerSaveRegister(codegen);
815 __ movl(free_reg, index_reg);
816 index_reg = free_reg;
817 index = Location::RegisterLocation(index_reg);
818 } else {
819 // The initial register stored in `index_` has already been
820 // saved in the call to art::SlowPathCode::SaveLiveRegisters
821 // (as it is not a callee-save register), so we can freely
822 // use it.
823 }
824 // Shifting the index value contained in `index_reg` by the scale
825 // factor (2) cannot overflow in practice, as the runtime is
826 // unable to allocate object arrays with a size larger than
827 // 2^26 - 1 (that is, 2^28 - 4 bytes).
828 __ shll(index_reg, Immediate(TIMES_4));
829 static_assert(
830 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
831 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
832 __ AddImmediate(index_reg, Immediate(offset_));
833 } else {
Roland Levillain3d312422016-06-23 13:53:42 +0100834 // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
835 // intrinsics, `index_` is not shifted by a scale factor of 2
836 // (as in the case of ArrayGet), as it is actually an offset
837 // to an object field within an object.
838 DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000839 DCHECK(instruction_->GetLocations()->Intrinsified());
Vladimir Markocde64972023-04-25 16:40:06 +0000840 DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
841 (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile) ||
842 (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kJdkUnsafeGetObject) ||
843 (instruction_->AsInvoke()->GetIntrinsic() ==
Vladimir Marko5150dbe2023-04-26 09:13:59 +0000844 Intrinsics::kJdkUnsafeGetObjectVolatile) ||
Vladimir Markocde64972023-04-25 16:40:06 +0000845 (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kJdkUnsafeGetObjectAcquire))
846 << instruction_->AsInvoke()->GetIntrinsic();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000847 DCHECK_EQ(offset_, 0U);
848 DCHECK(index_.IsRegisterPair());
849 // UnsafeGet's offset location is a register pair, the low
850 // part contains the correct offset.
851 index = index_.ToLow();
852 }
853 }
854
855 // We're moving two or three locations to locations that could
856 // overlap, so we need a parallel move resolver.
857 InvokeRuntimeCallingConvention calling_convention;
Vladimir Markoca6fff82017-10-03 14:49:14 +0100858 HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
Roland Levillain0d5a2812015-11-13 10:07:31 +0000859 parallel_move.AddMove(ref_,
860 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100861 DataType::Type::kReference,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000862 nullptr);
863 parallel_move.AddMove(obj_,
864 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100865 DataType::Type::kReference,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000866 nullptr);
867 if (index.IsValid()) {
868 parallel_move.AddMove(index,
869 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100870 DataType::Type::kInt32,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000871 nullptr);
872 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
873 } else {
874 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
875 __ movl(calling_convention.GetRegisterAt(2), Immediate(offset_));
876 }
Serban Constantinescuba45db02016-07-12 22:53:02 +0100877 x86_codegen->InvokeRuntime(kQuickReadBarrierSlow, instruction_, instruction_->GetDexPc(), this);
Roland Levillain0d5a2812015-11-13 10:07:31 +0000878 CheckEntrypointTypes<
879 kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
880 x86_codegen->Move32(out_, Location::RegisterLocation(EAX));
881
882 RestoreLiveRegisters(codegen, locations);
883 __ jmp(GetExitLabel());
884 }
885
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100886 const char* GetDescription() const override { return "ReadBarrierForHeapReferenceSlowPathX86"; }
Roland Levillain0d5a2812015-11-13 10:07:31 +0000887
888 private:
889 Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
890 size_t ref = static_cast<int>(ref_.AsRegister<Register>());
891 size_t obj = static_cast<int>(obj_.AsRegister<Register>());
892 for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
893 if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
894 return static_cast<Register>(i);
895 }
896 }
897 // We shall never fail to find a free caller-save register, as
898 // there are more than two core caller-save registers on x86
899 // (meaning it is possible to find one which is different from
900 // `ref` and `obj`).
901 DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
902 LOG(FATAL) << "Could not find a free caller-save register";
903 UNREACHABLE();
904 }
905
Roland Levillain0d5a2812015-11-13 10:07:31 +0000906 const Location out_;
907 const Location ref_;
908 const Location obj_;
909 const uint32_t offset_;
910 // An additional location containing an index to an array.
911 // Only used for HArrayGet and the UnsafeGetObject &
912 // UnsafeGetObjectVolatile intrinsics.
913 const Location index_;
914
915 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86);
916};
917
// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86 : public SlowPathCode {
 public:
  ReadBarrierForRootSlowPathX86(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    DCHECK(gUseReadBarrier);
  }

  // Calls the kQuickReadBarrierForRootSlow entrypoint with `root_` as
  // argument and moves the result (returned in EAX) into `out_`.
  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Pass the root in the first runtime-call argument register.
    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    x86_codegen->Move32(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                               instruction_,
                               instruction_->GetDexPc(),
                               this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_codegen->Move32(out_, Location::RegisterLocation(EAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "ReadBarrierForRootSlowPathX86"; }

 private:
  // Where the read barrier's result is to be stored.
  const Location out_;
  // The location of the GC root loaded by the instrumented instruction.
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86);
};
960
Mythri Alle5097f832021-11-02 14:52:30 +0000961class MethodEntryExitHooksSlowPathX86 : public SlowPathCode {
962 public:
963 explicit MethodEntryExitHooksSlowPathX86(HInstruction* instruction) : SlowPathCode(instruction) {}
964
965 void EmitNativeCode(CodeGenerator* codegen) override {
966 CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
967 LocationSummary* locations = instruction_->GetLocations();
968 QuickEntrypointEnum entry_point =
969 (instruction_->IsMethodEntryHook()) ? kQuickMethodEntryHook : kQuickMethodExitHook;
970 __ Bind(GetEntryLabel());
971 SaveLiveRegisters(codegen, locations);
Mythri Allebab6beb2022-10-21 13:28:05 +0000972 if (instruction_->IsMethodExitHook()) {
973 __ movl(EBX, Immediate(codegen->GetFrameSize()));
974 }
Mythri Alle5097f832021-11-02 14:52:30 +0000975 x86_codegen->InvokeRuntime(entry_point, instruction_, instruction_->GetDexPc(), this);
976 RestoreLiveRegisters(codegen, locations);
977 __ jmp(GetExitLabel());
978 }
979
980 const char* GetDescription() const override {
981 return "MethodEntryExitHooksSlowPath";
982 }
983
984 private:
985 DISALLOW_COPY_AND_ASSIGN(MethodEntryExitHooksSlowPathX86);
986};
987
Nicolas Geoffray9e598902021-11-19 14:53:07 +0000988class CompileOptimizedSlowPathX86 : public SlowPathCode {
989 public:
990 CompileOptimizedSlowPathX86() : SlowPathCode(/* instruction= */ nullptr) {}
991
992 void EmitNativeCode(CodeGenerator* codegen) override {
993 CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
994 __ Bind(GetEntryLabel());
995 x86_codegen->GenerateInvokeRuntime(
996 GetThreadOffset<kX86PointerSize>(kQuickCompileOptimized).Int32Value());
997 __ jmp(GetExitLabel());
998 }
999
1000 const char* GetDescription() const override {
1001 return "CompileOptimizedSlowPath";
1002 }
1003
1004 private:
1005 DISALLOW_COPY_AND_ASSIGN(CompileOptimizedSlowPathX86);
1006};
1007
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001008#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +01001009// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
1010#define __ down_cast<X86Assembler*>(GetAssembler())-> // NOLINT
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001011
Aart Bike9f37602015-10-09 11:15:55 -07001012inline Condition X86Condition(IfCondition cond) {
Dave Allison20dfc792014-06-16 20:44:29 -07001013 switch (cond) {
1014 case kCondEQ: return kEqual;
1015 case kCondNE: return kNotEqual;
1016 case kCondLT: return kLess;
1017 case kCondLE: return kLessEqual;
1018 case kCondGT: return kGreater;
1019 case kCondGE: return kGreaterEqual;
Aart Bike9f37602015-10-09 11:15:55 -07001020 case kCondB: return kBelow;
1021 case kCondBE: return kBelowEqual;
1022 case kCondA: return kAbove;
1023 case kCondAE: return kAboveEqual;
Dave Allison20dfc792014-06-16 20:44:29 -07001024 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001025 LOG(FATAL) << "Unreachable";
1026 UNREACHABLE();
1027}
1028
Aart Bike9f37602015-10-09 11:15:55 -07001029// Maps signed condition to unsigned condition and FP condition to x86 name.
Roland Levillain4fa13f62015-07-06 18:11:54 +01001030inline Condition X86UnsignedOrFPCondition(IfCondition cond) {
1031 switch (cond) {
1032 case kCondEQ: return kEqual;
1033 case kCondNE: return kNotEqual;
Aart Bike9f37602015-10-09 11:15:55 -07001034 // Signed to unsigned, and FP to x86 name.
Roland Levillain4fa13f62015-07-06 18:11:54 +01001035 case kCondLT: return kBelow;
1036 case kCondLE: return kBelowEqual;
1037 case kCondGT: return kAbove;
1038 case kCondGE: return kAboveEqual;
Aart Bike9f37602015-10-09 11:15:55 -07001039 // Unsigned remain unchanged.
1040 case kCondB: return kBelow;
1041 case kCondBE: return kBelowEqual;
1042 case kCondA: return kAbove;
1043 case kCondAE: return kAboveEqual;
Roland Levillain4fa13f62015-07-06 18:11:54 +01001044 }
1045 LOG(FATAL) << "Unreachable";
1046 UNREACHABLE();
Dave Allison20dfc792014-06-16 20:44:29 -07001047}
1048
Nicolas Geoffraya7062e02014-05-22 12:50:17 +01001049void CodeGeneratorX86::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001050 stream << Register(reg);
Nicolas Geoffraya7062e02014-05-22 12:50:17 +01001051}
1052
1053void CodeGeneratorX86::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001054 stream << XmmRegister(reg);
Nicolas Geoffraya7062e02014-05-22 12:50:17 +01001055}
1056
Vladimir Markoa0431112018-06-25 09:32:54 +01001057const X86InstructionSetFeatures& CodeGeneratorX86::GetInstructionSetFeatures() const {
1058 return *GetCompilerOptions().GetInstructionSetFeatures()->AsX86InstructionSetFeatures();
1059}
1060
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001061size_t CodeGeneratorX86::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1062 __ movl(Address(ESP, stack_index), static_cast<Register>(reg_id));
1063 return kX86WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001064}
1065
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001066size_t CodeGeneratorX86::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1067 __ movl(static_cast<Register>(reg_id), Address(ESP, stack_index));
1068 return kX86WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001069}
1070
Mark Mendell7c8d0092015-01-26 11:21:33 -05001071size_t CodeGeneratorX86::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Aart Bikb13c65b2017-03-21 20:14:07 -07001072 if (GetGraph()->HasSIMD()) {
Aart Bik5576f372017-03-23 16:17:37 -07001073 __ movups(Address(ESP, stack_index), XmmRegister(reg_id));
Aart Bikb13c65b2017-03-21 20:14:07 -07001074 } else {
1075 __ movsd(Address(ESP, stack_index), XmmRegister(reg_id));
1076 }
Artem Serov6a0b6572019-07-26 20:38:37 +01001077 return GetSlowPathFPWidth();
Mark Mendell7c8d0092015-01-26 11:21:33 -05001078}
1079
1080size_t CodeGeneratorX86::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Aart Bikb13c65b2017-03-21 20:14:07 -07001081 if (GetGraph()->HasSIMD()) {
Aart Bik5576f372017-03-23 16:17:37 -07001082 __ movups(XmmRegister(reg_id), Address(ESP, stack_index));
Aart Bikb13c65b2017-03-21 20:14:07 -07001083 } else {
1084 __ movsd(XmmRegister(reg_id), Address(ESP, stack_index));
1085 }
Artem Serov6a0b6572019-07-26 20:38:37 +01001086 return GetSlowPathFPWidth();
Mark Mendell7c8d0092015-01-26 11:21:33 -05001087}
1088
Calin Juravle175dc732015-08-25 15:42:32 +01001089void CodeGeneratorX86::InvokeRuntime(QuickEntrypointEnum entrypoint,
1090 HInstruction* instruction,
1091 uint32_t dex_pc,
1092 SlowPathCode* slow_path) {
Alexandre Rames91a65162016-09-19 13:54:30 +01001093 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
Serban Constantinescuba45db02016-07-12 22:53:02 +01001094 GenerateInvokeRuntime(GetThreadOffset<kX86PointerSize>(entrypoint).Int32Value());
1095 if (EntrypointRequiresStackMap(entrypoint)) {
1096 RecordPcInfo(instruction, dex_pc, slow_path);
1097 }
Alexandre Rames8158f282015-08-07 10:26:17 +01001098}
1099
Roland Levillaindec8f632016-07-22 17:10:06 +01001100void CodeGeneratorX86::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
1101 HInstruction* instruction,
1102 SlowPathCode* slow_path) {
1103 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
Serban Constantinescuba45db02016-07-12 22:53:02 +01001104 GenerateInvokeRuntime(entry_point_offset);
1105}
1106
1107void CodeGeneratorX86::GenerateInvokeRuntime(int32_t entry_point_offset) {
Roland Levillaindec8f632016-07-22 17:10:06 +01001108 __ fs()->call(Address::Absolute(entry_point_offset));
1109}
1110
Santiago Aboy Solanes76d519b2023-04-03 12:34:45 +01001111namespace detail {
1112// Mark which intrinsics we don't have handcrafted code for.
1113template <Intrinsics T>
1114struct IsUnimplemented {
1115 bool is_unimplemented = false;
1116};
1117
1118#define TRUE_OVERRIDE(Name) \
1119 template <> \
1120 struct IsUnimplemented<Intrinsics::k##Name> { \
1121 bool is_unimplemented = true; \
1122 };
1123UNIMPLEMENTED_INTRINSIC_LIST_X86(TRUE_OVERRIDE)
1124#undef TRUE_OVERRIDE
1125
1126#include "intrinsics_list.h"
1127static constexpr bool kIsIntrinsicUnimplemented[] = {
1128 false, // kNone
1129#define IS_UNIMPLEMENTED(Intrinsic, ...) \
1130 IsUnimplemented<Intrinsics::k##Intrinsic>().is_unimplemented,
1131 INTRINSICS_LIST(IS_UNIMPLEMENTED)
1132#undef IS_UNIMPLEMENTED
1133};
1134#undef INTRINSICS_LIST
1135
1136} // namespace detail
1137
// Constructs the x86 code generator. All arena-backed patch/fixup containers
// are allocated from the graph's allocator; the base-class call registers the
// register file sizes and the callee-save mask (including the fake return
// address register, see below).
CodeGeneratorX86::CodeGeneratorX86(HGraph* graph,
                                   const CompilerOptions& compiler_options,
                                   OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCpuRegisters,
                    kNumberOfXmmRegisters,
                    kNumberOfRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves))
                        | (1 << kFakeReturnRegister),
                    0,  // Presumably the FP callee-save mask (none on x86) — matches the core mask above.
                    compiler_options,
                    stats,
                    ArrayRef<const bool>(detail::kIsIntrinsicUnimplemented)),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetAllocator(), this),
      assembler_(graph->GetAllocator(),
                 compiler_options.GetInstructionSetFeatures()->AsX86InstructionSetFeatures()),
      boot_image_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      public_type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      package_type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_jni_entrypoint_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_other_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      constant_area_start_(-1),  // -1 marks "constant area not yet emitted".
      fixups_to_jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      method_address_offset_(std::less<uint32_t>(),
                             graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
  // Use a fake return address register to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01001177
// Marks registers that the register allocator must never hand out.
void CodeGeneratorX86::SetupBlockedRegisters() const {
  // Stack register is always reserved.
  blocked_core_registers_[ESP] = true;
}
1182
// Instruction visitor that emits x86 code for each HInstruction; it borrows
// (does not own) the assembler held by the code generator.
InstructionCodeGeneratorX86::InstructionCodeGeneratorX86(HGraph* graph, CodeGeneratorX86* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}
1187
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001188static dwarf::Reg DWARFReg(Register reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001189 return dwarf::Reg::X86Core(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001190}
1191
Mythri Alle5097f832021-11-02 14:52:30 +00001192void SetInForReturnValue(HInstruction* ret, LocationSummary* locations) {
1193 switch (ret->InputAt(0)->GetType()) {
1194 case DataType::Type::kReference:
1195 case DataType::Type::kBool:
1196 case DataType::Type::kUint8:
1197 case DataType::Type::kInt8:
1198 case DataType::Type::kUint16:
1199 case DataType::Type::kInt16:
1200 case DataType::Type::kInt32:
1201 locations->SetInAt(0, Location::RegisterLocation(EAX));
1202 break;
1203
1204 case DataType::Type::kInt64:
1205 locations->SetInAt(0, Location::RegisterPairLocation(EAX, EDX));
1206 break;
1207
1208 case DataType::Type::kFloat32:
1209 case DataType::Type::kFloat64:
1210 locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
1211 break;
1212
1213 case DataType::Type::kVoid:
1214 locations->SetInAt(0, Location::NoLocation());
1215 break;
1216
1217 default:
1218 LOG(FATAL) << "Unknown return type " << ret->InputAt(0)->GetType();
1219 }
1220}
1221
1222void LocationsBuilderX86::VisitMethodExitHook(HMethodExitHook* method_hook) {
1223 LocationSummary* locations = new (GetGraph()->GetAllocator())
1224 LocationSummary(method_hook, LocationSummary::kCallOnSlowPath);
1225 SetInForReturnValue(method_hook, locations);
Mythri Alle98aefe02023-02-27 18:50:44 +00001226 // We use rdtsc to obtain a timestamp for tracing. rdtsc returns the results in EAX + EDX.
1227 locations->AddTemp(Location::RegisterLocation(EAX));
1228 locations->AddTemp(Location::RegisterLocation(EDX));
1229 // An additional temporary register to hold address to store the timestamp counter.
1230 locations->AddTemp(Location::RequiresRegister());
Mythri Alle5097f832021-11-02 14:52:30 +00001231}
1232
// Emits the fast path for method entry/exit instrumentation hooks: dispatch to
// the slow path when deopt or slow listeners are required, otherwise write a
// (method, timestamp) record directly into the thread-local trace buffer.
void InstructionCodeGeneratorX86::GenerateMethodEntryExitHook(HInstruction* instruction) {
  SlowPathCode* slow_path =
      new (codegen_->GetScopedAllocator()) MethodEntryExitHooksSlowPathX86(instruction);
  codegen_->AddSlowPath(slow_path);
  LocationSummary* locations = instruction->GetLocations();

  if (instruction->IsMethodExitHook()) {
    // Check if we are required to check if the caller needs a deoptimization. Strictly speaking it
    // would be sufficient to check if CheckCallerForDeopt bit is set. Though it is faster to check
    // if it is just non-zero. kCHA bit isn't used in debuggable runtimes as cha optimization is
    // disabled in debuggable runtime. The other bit is used when this method itself requires a
    // deoptimization due to redefinition. So it is safe to just check for non-zero value here.
    __ cmpl(Address(ESP, codegen_->GetStackOffsetOfShouldDeoptimizeFlag()), Immediate(0));
    __ j(kNotEqual, slow_path->GetEntryLabel());
  }

  // Compare the listener byte in the global Instrumentation object against the
  // "fast trace listeners" marker; the two conditional jumps below decode the
  // three-way result of this single comparison.
  uint64_t address = reinterpret_cast64<uint64_t>(Runtime::Current()->GetInstrumentation());
  MemberOffset offset = instruction->IsMethodExitHook() ?
      instrumentation::Instrumentation::HaveMethodExitListenersOffset() :
      instrumentation::Instrumentation::HaveMethodEntryListenersOffset();
  __ cmpb(Address::Absolute(address + offset.Int32Value()),
          Immediate(instrumentation::Instrumentation::kFastTraceListeners));
  // Check if there are any trace method entry / exit listeners. If no, continue.
  __ j(kLess, slow_path->GetExitLabel());
  // Check if there are any slow (jvmti / trace with thread cpu time) method entry / exit listeners.
  // If yes, just take the slow path.
  __ j(kGreater, slow_path->GetEntryLabel());

  // Check if there is place in the buffer for a new entry, if no, take slow path.
  uint64_t trace_buffer_index_addr = Thread::TraceBufferIndexOffset<kX86PointerSize>().Int32Value();
  __ fs()->cmpl(Address::Absolute(trace_buffer_index_addr), Immediate(kNumEntriesForWallClock));
  __ j(kLess, slow_path->GetEntryLabel());

  // Just update the buffer and advance the offset
  // For entry_addr use the first temp that isn't EAX or EDX. We need this after
  // rdtsc which returns values in EAX + EDX.
  Register entry_addr = locations->GetTemp(2).AsRegister<Register>();
  Register index = locations->GetTemp(1).AsRegister<Register>();
  uint32_t trace_buffer_ptr = Thread::TraceBufferPtrOffset<kX86PointerSize>().Int32Value();
  // entry_addr = base_addr + sizeof(void*) * index
  __ fs()->movl(index, Address::Absolute(trace_buffer_index_addr));
  __ fs()->movl(entry_addr, Address::Absolute(trace_buffer_ptr));
  __ leal(entry_addr, Address(entry_addr, index, TIMES_4, 0));
  // Advance the index in the buffer
  __ subl(index, Immediate(kNumEntriesForWallClock));
  __ fs()->movl(Address::Absolute(trace_buffer_index_addr), index);

  // Record method pointer and trace action.
  // `index` is dead from here on, so reuse its register for the method pointer.
  Register method = index;
  __ movl(method, Address(ESP, kCurrentMethodStackOffset));
  // Use last two bits to encode trace method action. For MethodEntry it is 0
  // so no need to set the bits since they are 0 already.
  if (instruction->IsMethodExitHook()) {
    // ArtMethod pointers are at least 4-byte aligned, so the low two bits are
    // free to carry the action tag.
    DCHECK_GE(ArtMethod::Alignment(kRuntimePointerSize), static_cast<size_t>(4));
    uint32_t trace_action = 1;
    __ orl(method, Immediate(trace_action));
  }
  __ movl(Address(entry_addr, kMethodOffsetInBytes), method);
  // Get the timestamp. rdtsc returns timestamp in EAX + EDX.
  __ rdtsc();
  __ movl(Address(entry_addr, kTimestampOffsetInBytes), EDX);
  __ movl(Address(entry_addr, kLowTimestampOffsetInBytes), EAX);
  __ Bind(slow_path->GetExitLabel());
}
1297
// Method exit hooks are only generated for debuggable JIT-compiled code and
// need the current method saved in the frame (checked below); the real work
// is shared with the entry hook.
void InstructionCodeGeneratorX86::VisitMethodExitHook(HMethodExitHook* instruction) {
  DCHECK(codegen_->GetCompilerOptions().IsJitCompiler() && GetGraph()->IsDebuggable());
  DCHECK(codegen_->RequiresCurrentMethod());
  GenerateMethodEntryExitHook(instruction);
}
1303
1304void LocationsBuilderX86::VisitMethodEntryHook(HMethodEntryHook* method_hook) {
Mythri Alle98aefe02023-02-27 18:50:44 +00001305 LocationSummary* locations = new (GetGraph()->GetAllocator())
1306 LocationSummary(method_hook, LocationSummary::kCallOnSlowPath);
1307 // We use rdtsc to obtain a timestamp for tracing. rdtsc returns the results in EAX + EDX.
1308 locations->AddTemp(Location::RegisterLocation(EAX));
1309 locations->AddTemp(Location::RegisterLocation(EDX));
1310 // An additional temporary register to hold address to store the timestamp counter.
1311 locations->AddTemp(Location::RequiresRegister());
Mythri Alle5097f832021-11-02 14:52:30 +00001312}
1313
// Method entry hooks are only generated for debuggable JIT-compiled code and
// need the current method saved in the frame (checked below); the real work
// is shared with the exit hook.
void InstructionCodeGeneratorX86::VisitMethodEntryHook(HMethodEntryHook* instruction) {
  DCHECK(codegen_->GetCompilerOptions().IsJitCompiler() && GetGraph()->IsDebuggable());
  DCHECK(codegen_->RequiresCurrentMethod());
  GenerateMethodEntryExitHook(instruction);
}
1319
// Emits the hotness-counter update: decrements ArtMethod's hotness count when
// requested by the compiler options, and for baseline JIT code also decrements
// the ProfilingInfo counter, jumping to a slow path that triggers optimized
// compilation when it hits the threshold.
void CodeGeneratorX86::MaybeIncrementHotness(bool is_frame_entry) {
  if (GetCompilerOptions().CountHotnessInCompiledCode()) {
    Register reg = EAX;
    if (is_frame_entry) {
      // On entry the method pointer is still live in the argument register.
      reg = kMethodRegisterArgument;
    } else {
      // Elsewhere no register is free: spill EAX and reload the method from
      // the frame (kX86WordSize skips the just-pushed slot).
      __ pushl(EAX);
      __ cfi().AdjustCFAOffset(4);
      __ movl(EAX, Address(ESP, kX86WordSize));
    }
    NearLabel overflow;
    // Skip the decrement once the counter already reached the sentinel value,
    // so it does not run past the threshold.
    __ cmpw(Address(reg, ArtMethod::HotnessCountOffset().Int32Value()),
            Immediate(interpreter::kNterpHotnessValue));
    __ j(kEqual, &overflow);
    __ addw(Address(reg, ArtMethod::HotnessCountOffset().Int32Value()), Immediate(-1));
    __ Bind(&overflow);
    if (!is_frame_entry) {
      __ popl(EAX);
      __ cfi().AdjustCFAOffset(-4);
    }
  }

  if (GetGraph()->IsCompilingBaseline() && !Runtime::Current()->IsAotCompiler()) {
    SlowPathCode* slow_path = new (GetScopedAllocator()) CompileOptimizedSlowPathX86();
    AddSlowPath(slow_path);
    ProfilingInfo* info = GetGraph()->GetProfilingInfo();
    DCHECK(info != nullptr);
    // The counter lives at a fixed absolute address inside the ProfilingInfo.
    uint32_t address = reinterpret_cast32<uint32_t>(info) +
        ProfilingInfo::BaselineHotnessCountOffset().Int32Value();
    DCHECK(!HasEmptyFrame());
    // With multiple threads, this can overflow. This is OK, we will eventually get to see
    // it reaching 0. Also, at this point we have no register available to look
    // at the counter directly.
    __ addw(Address::Absolute(address), Immediate(-1));
    __ j(kEqual, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetExitLabel());
  }
}
1358
// Emits the method prologue: optional class-initialization check, implicit
// stack overflow probe, callee-save spills, frame allocation, and saving of
// the current method, followed by the hotness update.
void CodeGeneratorX86::GenerateFrameEntry() {
  __ cfi().SetCurrentCFAOffset(kX86WordSize);  // return address

  // Check if we need to generate the clinit check. We will jump to the
  // resolution stub if the class is not initialized and the executing thread is
  // not the thread initializing it.
  // We do this before constructing the frame to get the correct stack trace if
  // an exception is thrown.
  if (GetCompilerOptions().ShouldCompileWithClinitCheck(GetGraph()->GetArtMethod())) {
    NearLabel continue_execution, resolution;
    // We'll use EBP as temporary.
    __ pushl(EBP);
    // Check if we're visibly initialized.

    // We don't emit a read barrier here to save on code size. We rely on the
    // resolution trampoline to do a suspend check before re-entering this code.
    __ movl(EBP, Address(kMethodRegisterArgument, ArtMethod::DeclaringClassOffset().Int32Value()));
    __ cmpb(Address(EBP, status_byte_offset), Immediate(shifted_visibly_initialized_value));
    __ j(kAboveEqual, &continue_execution);

    // Check if we're initializing and the thread initializing is the one
    // executing the code.
    __ cmpb(Address(EBP, status_byte_offset), Immediate(shifted_initializing_value));
    __ j(kBelow, &resolution);

    // Compare the initializing thread's id against the current thread's.
    __ movl(EBP, Address(EBP, mirror::Class::ClinitThreadIdOffset().Int32Value()));
    __ fs()->cmpl(EBP, Address::Absolute(Thread::TidOffset<kX86PointerSize>().Int32Value()));
    __ j(kEqual, &continue_execution);
    __ Bind(&resolution);

    __ popl(EBP);
    // Jump to the resolution stub.
    ThreadOffset32 entrypoint_offset =
        GetThreadOffset<kX86PointerSize>(kQuickQuickResolutionTrampoline);
    __ fs()->jmp(Address::Absolute(entrypoint_offset));

    __ Bind(&continue_execution);
    __ popl(EBP);
  }

  __ Bind(&frame_entry_label_);
  bool skip_overflow_check =
      IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());

  if (!skip_overflow_check) {
    // Implicit stack overflow check: a load below the stack pointer faults if
    // the stack cannot grow; the recorded PC lets the fault handler map the
    // SIGSEGV back to this method.
    size_t reserved_bytes = GetStackOverflowReservedBytes(InstructionSet::kX86);
    __ testl(EAX, Address(ESP, -static_cast<int32_t>(reserved_bytes)));
    RecordPcInfo(nullptr, 0);
  }

  if (!HasEmptyFrame()) {
    // Spill allocated callee-save registers, highest index first, updating the
    // CFI so unwinders can locate each saved register.
    for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
      Register reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        __ pushl(reg);
        __ cfi().AdjustCFAOffset(kX86WordSize);
        __ cfi().RelOffset(DWARFReg(reg), 0);
      }
    }

    int adjust = GetFrameSize() - FrameEntrySpillSize();
    IncreaseFrame(adjust);
    // Save the current method if we need it. Note that we do not
    // do this in HCurrentMethod, as the instruction might have been removed
    // in the SSA graph.
    if (RequiresCurrentMethod()) {
      __ movl(Address(ESP, kCurrentMethodStackOffset), kMethodRegisterArgument);
    }

    if (GetGraph()->HasShouldDeoptimizeFlag()) {
      // Initialize should_deoptimize flag to 0.
      __ movl(Address(ESP, GetStackOffsetOfShouldDeoptimizeFlag()), Immediate(0));
    }
  }

  MaybeIncrementHotness(/* is_frame_entry= */ true);
}
1437
// Emits the method epilogue: frame deallocation, callee-save restores and the
// return, bracketing the sequence with CFI remember/restore so later code in
// the method keeps the pre-epilogue unwind state.
void CodeGeneratorX86::GenerateFrameExit() {
  __ cfi().RememberState();
  if (!HasEmptyFrame()) {
    int adjust = GetFrameSize() - FrameEntrySpillSize();
    DecreaseFrame(adjust);

    // Restore callee-saves in the reverse order of the prologue's pushes.
    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
      Register reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        __ popl(reg);
        __ cfi().AdjustCFAOffset(-static_cast<int>(kX86WordSize));
        __ cfi().Restore(DWARFReg(reg));
      }
    }
  }
  __ ret();
  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1457
// Binds the label associated with the basic block to the current assembler
// position, making the block a valid branch target.
void CodeGeneratorX86::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
1461
// Returns the fixed location in which the managed calling convention delivers
// a return value of the given type on x86.
Location InvokeDexCallingConventionVisitorX86::GetReturnLocation(DataType::Type type) const {
  switch (type) {
    // References and 32-bit-or-narrower integral values come back in EAX.
    case DataType::Type::kReference:
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kUint32:
    case DataType::Type::kInt32:
      return Location::RegisterLocation(EAX);

    // 64-bit integral values are split across the EAX/EDX pair.
    case DataType::Type::kUint64:
    case DataType::Type::kInt64:
      return Location::RegisterPairLocation(EAX, EDX);

    case DataType::Type::kVoid:
      return Location::NoLocation();

    // Floating point values are returned in XMM0.
    case DataType::Type::kFloat64:
    case DataType::Type::kFloat32:
      return Location::FpuRegisterLocation(XMM0);
  }

  UNREACHABLE();
}
1488
// The callee ArtMethod* is passed in the dedicated method register.
Location InvokeDexCallingConventionVisitorX86::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}
1492
// Assigns the location for the next managed-ABI argument of the given type.
// Stateful: gp_index_/float_index_ track how many register slots of each kind
// have been consumed, and stack_index_ tracks the argument's word position so
// register arguments still reserve their stack slot.
Location InvokeDexCallingConventionVisitorX86::GetNextLocation(DataType::Type type) {
  switch (type) {
    case DataType::Type::kReference:
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      uint32_t index = gp_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        // stack_index_ was already advanced past this argument; back up one word.
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case DataType::Type::kInt64: {
      // A long consumes two GP register slots and two stack words.
      uint32_t index = gp_index_;
      gp_index_ += 2;
      stack_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        X86ManagedRegister pair = X86ManagedRegister::FromRegisterPair(
            calling_convention.GetRegisterPairAt(index));
        return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case DataType::Type::kFloat32: {
      uint32_t index = float_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case DataType::Type::kFloat64: {
      // A double takes one XMM register but two stack words.
      uint32_t index = float_index_++;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      UNREACHABLE();
  }
  return Location::NoLocation();
}
Nicolas Geoffraydb928fc2014-04-16 17:38:32 +01001552
Vladimir Marko86c87522020-05-11 16:55:55 +01001553Location CriticalNativeCallingConventionVisitorX86::GetNextLocation(DataType::Type type) {
1554 DCHECK_NE(type, DataType::Type::kReference);
1555
1556 Location location;
1557 if (DataType::Is64BitType(type)) {
1558 location = Location::DoubleStackSlot(stack_offset_);
1559 stack_offset_ += 2 * kFramePointerSize;
1560 } else {
1561 location = Location::StackSlot(stack_offset_);
1562 stack_offset_ += kFramePointerSize;
1563 }
1564 if (for_register_allocation_) {
1565 location = Location::Any();
1566 }
1567 return location;
1568}
1569
1570Location CriticalNativeCallingConventionVisitorX86::GetReturnLocation(DataType::Type type) const {
1571 // We perform conversion to the managed ABI return register after the call if needed.
1572 InvokeDexCallingConventionVisitorX86 dex_calling_convention;
1573 return dex_calling_convention.GetReturnLocation(type);
1574}
1575
// Location of the callee method for @CriticalNative calls.
Location CriticalNativeCallingConventionVisitorX86::GetMethodLocation() const {
  // Pass the method in the hidden argument EAX.
  return Location::RegisterLocation(EAX);
}
1580
// Emits a 32-bit move between two arbitrary locations (core register, XMM
// register, stack slot, or constant source), choosing the x86 instruction
// that matches the location pair. No-op when source and destination match.
void CodeGeneratorX86::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ movl(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      // movd transfers 32 bits between an XMM register and a core register.
      __ movd(destination.AsRegister<Register>(), source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      int32_t value = GetInt32ValueOf(source.GetConstant());
      __ movl(destination.AsRegister<Register>(), Immediate(value));
    } else {
      DCHECK(source.IsStackSlot());
      __ movl(destination.AsRegister<Register>(), Address(ESP, source.GetStackIndex()));
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      __ movd(destination.AsFpuRegister<XmmRegister>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
    } else {
      DCHECK(source.IsStackSlot());
      __ movss(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
    }
  } else {
    DCHECK(destination.IsStackSlot()) << destination;
    if (source.IsRegister()) {
      __ movl(Address(ESP, destination.GetStackIndex()), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      __ movss(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      int32_t value = GetInt32ValueOf(constant);
      __ movl(Address(ESP, destination.GetStackIndex()), Immediate(value));
    } else {
      DCHECK(source.IsStackSlot());
      // Stack-to-stack: push/pop moves the word without needing a scratch register.
      __ pushl(Address(ESP, source.GetStackIndex()));
      __ popl(Address(ESP, destination.GetStackIndex()));
    }
  }
}
1623
1624void CodeGeneratorX86::Move64(Location destination, Location source) {
1625 if (source.Equals(destination)) {
1626 return;
1627 }
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001628 if (destination.IsRegisterPair()) {
1629 if (source.IsRegisterPair()) {
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001630 EmitParallelMoves(
1631 Location::RegisterLocation(source.AsRegisterPairHigh<Register>()),
1632 Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001633 DataType::Type::kInt32,
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001634 Location::RegisterLocation(source.AsRegisterPairLow<Register>()),
Nicolas Geoffray90218252015-04-15 11:56:51 +01001635 Location::RegisterLocation(destination.AsRegisterPairLow<Register>()),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001636 DataType::Type::kInt32);
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001637 } else if (source.IsFpuRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001638 XmmRegister src_reg = source.AsFpuRegister<XmmRegister>();
1639 __ movd(destination.AsRegisterPairLow<Register>(), src_reg);
1640 __ psrlq(src_reg, Immediate(32));
1641 __ movd(destination.AsRegisterPairHigh<Register>(), src_reg);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001642 } else {
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001643 // No conflict possible, so just do the moves.
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001644 DCHECK(source.IsDoubleStackSlot());
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001645 __ movl(destination.AsRegisterPairLow<Register>(), Address(ESP, source.GetStackIndex()));
1646 __ movl(destination.AsRegisterPairHigh<Register>(),
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001647 Address(ESP, source.GetHighStackIndex(kX86WordSize)));
1648 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001649 } else if (destination.IsFpuRegister()) {
Mark Mendell7c8d0092015-01-26 11:21:33 -05001650 if (source.IsFpuRegister()) {
1651 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
1652 } else if (source.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001653 __ movsd(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
Calin Juravlee460d1d2015-09-29 04:52:17 +01001654 } else if (source.IsRegisterPair()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001655 size_t elem_size = DataType::Size(DataType::Type::kInt32);
Vladimir Markodec78172020-06-19 15:31:23 +01001656 // Push the 2 source registers to the stack.
1657 __ pushl(source.AsRegisterPairHigh<Register>());
1658 __ cfi().AdjustCFAOffset(elem_size);
1659 __ pushl(source.AsRegisterPairLow<Register>());
1660 __ cfi().AdjustCFAOffset(elem_size);
Calin Juravlee460d1d2015-09-29 04:52:17 +01001661 __ movsd(destination.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
1662 // And remove the temporary stack space we allocated.
Vladimir Markodec78172020-06-19 15:31:23 +01001663 DecreaseFrame(2 * elem_size);
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001664 } else {
1665 LOG(FATAL) << "Unimplemented";
1666 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001667 } else {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00001668 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001669 if (source.IsRegisterPair()) {
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001670 // No conflict possible, so just do the moves.
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001671 __ movl(Address(ESP, destination.GetStackIndex()), source.AsRegisterPairLow<Register>());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001672 __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)),
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001673 source.AsRegisterPairHigh<Register>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001674 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001675 __ movsd(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001676 } else if (source.IsConstant()) {
1677 HConstant* constant = source.GetConstant();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001678 DCHECK(constant->IsLongConstant() || constant->IsDoubleConstant());
1679 int64_t value = GetInt64ValueOf(constant);
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001680 __ movl(Address(ESP, destination.GetStackIndex()), Immediate(Low32Bits(value)));
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001681 __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)),
1682 Immediate(High32Bits(value)));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001683 } else {
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001684 DCHECK(source.IsDoubleStackSlot()) << source;
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001685 EmitParallelMoves(
1686 Location::StackSlot(source.GetStackIndex()),
1687 Location::StackSlot(destination.GetStackIndex()),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001688 DataType::Type::kInt32,
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001689 Location::StackSlot(source.GetHighStackIndex(kX86WordSize)),
Nicolas Geoffray90218252015-04-15 11:56:51 +01001690 Location::StackSlot(destination.GetHighStackIndex(kX86WordSize)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001691 DataType::Type::kInt32);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001692 }
1693 }
1694}
1695
Andra Danciu1ca6f322020-08-12 08:58:07 +00001696static Address CreateAddress(Register base,
1697 Register index = Register::kNoRegister,
1698 ScaleFactor scale = TIMES_1,
1699 int32_t disp = 0) {
1700 if (index == Register::kNoRegister) {
1701 return Address(base, disp);
1702 }
1703
1704 return Address(base, index, scale, disp);
1705}
1706
// Loads a value of `dst_type` from `src` into `dst` without emitting a read
// barrier. If `instr` is non-null, an implicit null check is recorded right
// after the first instruction that touches memory. `temp` is only used for
// atomic 64-bit loads: a single movsd through an XMM register is atomic,
// whereas a pair of 32-bit loads would not be.
void CodeGeneratorX86::LoadFromMemoryNoBarrier(DataType::Type dst_type,
                                               Location dst,
                                               Address src,
                                               HInstruction* instr,
                                               XmmRegister temp,
                                               bool is_atomic_load) {
  switch (dst_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
      // Zero-extend unsigned 8-bit values.
      __ movzxb(dst.AsRegister<Register>(), src);
      break;
    case DataType::Type::kInt8:
      // Sign-extend signed 8-bit values.
      __ movsxb(dst.AsRegister<Register>(), src);
      break;
    case DataType::Type::kInt16:
      __ movsxw(dst.AsRegister<Register>(), src);
      break;
    case DataType::Type::kUint16:
      __ movzxw(dst.AsRegister<Register>(), src);
      break;
    case DataType::Type::kInt32:
      __ movl(dst.AsRegister<Register>(), src);
      break;
    case DataType::Type::kInt64: {
      if (is_atomic_load) {
        // Single 64-bit load through an XMM register, then split the temp
        // into the destination register pair.
        __ movsd(temp, src);
        if (instr != nullptr) {
          MaybeRecordImplicitNullCheck(instr);
        }
        __ movd(dst.AsRegisterPairLow<Register>(), temp);
        __ psrlq(temp, Immediate(32));
        __ movd(dst.AsRegisterPairHigh<Register>(), temp);
      } else {
        // Two 32-bit loads. The low load must not clobber the base register,
        // which is still needed to address the high word.
        DCHECK_NE(src.GetBaseRegister(), dst.AsRegisterPairLow<Register>());
        Address src_high = Address::displace(src, kX86WordSize);
        __ movl(dst.AsRegisterPairLow<Register>(), src);
        if (instr != nullptr) {
          // Record the null check on the first access only; a fault on the
          // high word would fault on the low word as well.
          MaybeRecordImplicitNullCheck(instr);
        }
        __ movl(dst.AsRegisterPairHigh<Register>(), src_high);
      }
      break;
    }
    case DataType::Type::kFloat32:
      __ movss(dst.AsFpuRegister<XmmRegister>(), src);
      break;
    case DataType::Type::kFloat64:
      __ movsd(dst.AsFpuRegister<XmmRegister>(), src);
      break;
    case DataType::Type::kReference:
      // This path is only valid without read barriers; the reference may be
      // poisoned in the heap and must be unpoisoned after the load.
      DCHECK(!gUseReadBarrier);
      __ movl(dst.AsRegister<Register>(), src);
      __ MaybeUnpoisonHeapReference(dst.AsRegister<Register>());
      break;
    default:
      LOG(FATAL) << "Unreachable type " << dst_type;
  }
  if (instr != nullptr && dst_type != DataType::Type::kInt64) {
    // kInt64 needs special handling that is done in the above switch.
    MaybeRecordImplicitNullCheck(instr);
  }
}
1769
// Stores `src` (a register, register pair, FP register or constant) of type
// `src_type` to memory at dst_base + dst_index * dst_scale + dst_disp.
// 64-bit values are stored as two 32-bit halves, low word first.
void CodeGeneratorX86::MoveToMemory(DataType::Type src_type,
                                    Location src,
                                    Register dst_base,
                                    Register dst_index,
                                    ScaleFactor dst_scale,
                                    int32_t dst_disp) {
  DCHECK(dst_base != Register::kNoRegister);
  Address dst = CreateAddress(dst_base, dst_index, dst_scale, dst_disp);

  switch (src_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8: {
      if (src.IsConstant()) {
        __ movb(dst, Immediate(CodeGenerator::GetInt8ValueOf(src.GetConstant())));
      } else {
        // Byte stores require one of the byte-addressable registers.
        __ movb(dst, src.AsRegister<ByteRegister>());
      }
      break;
    }
    case DataType::Type::kUint16:
    case DataType::Type::kInt16: {
      if (src.IsConstant()) {
        __ movw(dst, Immediate(CodeGenerator::GetInt16ValueOf(src.GetConstant())));
      } else {
        __ movw(dst, src.AsRegister<Register>());
      }
      break;
    }
    case DataType::Type::kUint32:
    case DataType::Type::kInt32: {
      if (src.IsConstant()) {
        int32_t v = CodeGenerator::GetInt32ValueOf(src.GetConstant());
        __ movl(dst, Immediate(v));
      } else {
        __ movl(dst, src.AsRegister<Register>());
      }
      break;
    }
    case DataType::Type::kUint64:
    case DataType::Type::kInt64: {
      // Store the two 32-bit halves separately; this store is not atomic.
      Address dst_next_4_bytes = CreateAddress(dst_base, dst_index, dst_scale, dst_disp + 4);
      if (src.IsConstant()) {
        int64_t v = CodeGenerator::GetInt64ValueOf(src.GetConstant());
        __ movl(dst, Immediate(Low32Bits(v)));
        __ movl(dst_next_4_bytes, Immediate(High32Bits(v)));
      } else {
        __ movl(dst, src.AsRegisterPairLow<Register>());
        __ movl(dst_next_4_bytes, src.AsRegisterPairHigh<Register>());
      }
      break;
    }
    case DataType::Type::kFloat32: {
      if (src.IsConstant()) {
        // A float constant is stored via its 32-bit integer representation.
        int32_t v = CodeGenerator::GetInt32ValueOf(src.GetConstant());
        __ movl(dst, Immediate(v));
      } else {
        __ movss(dst, src.AsFpuRegister<XmmRegister>());
      }
      break;
    }
    case DataType::Type::kFloat64: {
      Address dst_next_4_bytes = CreateAddress(dst_base, dst_index, dst_scale, dst_disp + 4);
      if (src.IsConstant()) {
        // A double constant is stored as two 32-bit integer halves.
        int64_t v = CodeGenerator::GetInt64ValueOf(src.GetConstant());
        __ movl(dst, Immediate(Low32Bits(v)));
        __ movl(dst_next_4_bytes, Immediate(High32Bits(v)));
      } else {
        __ movsd(dst, src.AsFpuRegister<XmmRegister>());
      }
      break;
    }
    case DataType::Type::kVoid:
    case DataType::Type::kReference:
      LOG(FATAL) << "Unreachable type " << src_type;
  }
}
1847
Calin Juravle175dc732015-08-25 15:42:32 +01001848void CodeGeneratorX86::MoveConstant(Location location, int32_t value) {
1849 DCHECK(location.IsRegister());
1850 __ movl(location.AsRegister<Register>(), Immediate(value));
1851}
1852
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001853void CodeGeneratorX86::MoveLocation(Location dst, Location src, DataType::Type dst_type) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001854 HParallelMove move(GetGraph()->GetAllocator());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001855 if (dst_type == DataType::Type::kInt64 && !src.IsConstant() && !src.IsFpuRegister()) {
1856 move.AddMove(src.ToLow(), dst.ToLow(), DataType::Type::kInt32, nullptr);
1857 move.AddMove(src.ToHigh(), dst.ToHigh(), DataType::Type::kInt32, nullptr);
Calin Juravlee460d1d2015-09-29 04:52:17 +01001858 } else {
David Brazdil74eb1b22015-12-14 11:44:01 +00001859 move.AddMove(src, dst, dst_type, nullptr);
Calin Juravlee460d1d2015-09-29 04:52:17 +01001860 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001861 GetMoveResolver()->EmitNativeCode(&move);
Calin Juravlee460d1d2015-09-29 04:52:17 +01001862}
1863
1864void CodeGeneratorX86::AddLocationAsTemp(Location location, LocationSummary* locations) {
1865 if (location.IsRegister()) {
1866 locations->AddTemp(location);
1867 } else if (location.IsRegisterPair()) {
1868 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairLow<Register>()));
1869 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairHigh<Register>()));
1870 } else {
1871 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1872 }
1873}
1874
// Shared code generation for unconditional control transfers (HGoto and the
// normal-flow edge of HTryBoundary). Emits suspend checks where required and
// elides the jump when `successor` is the next block in emission order.
void InstructionCodeGeneratorX86::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  if (successor->IsExitBlock()) {
    // Only an always-throwing instruction may precede a jump to the exit
    // block; the jump itself is unreachable, so emit nothing.
    DCHECK(got->GetPrevious()->AlwaysThrows());
    return;  // no code needed
  }

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    // Loop back edge: bump the hotness counter and emit the suspend check,
    // which also performs the branch to `successor`.
    codegen_->MaybeIncrementHotness(/* is_frame_entry= */ false);
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    // Method entry: emit the entry suspend check before branching.
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ jmp(codegen_->GetLabelOf(successor));
  }
}
1898
// An unconditional branch has no operands and thus no location summary.
void LocationsBuilderX86::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
1902
// Code generation for HGoto delegates to the shared HandleGoto logic.
void InstructionCodeGeneratorX86::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}
1906
// A try boundary has no operands and thus no location summary.
void LocationsBuilderX86::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}
1910
1911void InstructionCodeGeneratorX86::VisitTryBoundary(HTryBoundary* try_boundary) {
1912 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1913 if (!successor->IsExitBlock()) {
1914 HandleGoto(try_boundary, successor);
1915 }
1916}
1917
// The exit instruction has no operands and thus no location summary.
void LocationsBuilderX86::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
1921
// The exit block generates no code.
void InstructionCodeGeneratorX86::VisitExit([[maybe_unused]] HExit* exit) {}
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001923
// Emits the conditional jumps following an FP compare (see GenerateFPCompare,
// which uses ucomiss/ucomisd). The "unordered" result (either operand NaN)
// must be routed explicitly to whichever target the condition's NaN semantics
// demand before testing the ordinary outcome.
template<class LabelType>
void InstructionCodeGeneratorX86::GenerateFPJumps(HCondition* cond,
                                                  LabelType* true_label,
                                                  LabelType* false_label) {
  if (cond->IsFPConditionTrueIfNaN()) {
    __ j(kUnordered, true_label);
  } else if (cond->IsFPConditionFalseIfNaN()) {
    __ j(kUnordered, false_label);
  }
  // FP compares use the unsigned-flavor condition codes.
  __ j(X86UnsignedOrFPCondition(cond->GetCondition()), true_label);
}
1935
// Emits the compare-and-branch sequence for a 64-bit comparison on x86-32.
// The comparison is decomposed into a signed compare of the high words
// followed, when the high words are equal, by an unsigned compare of the low
// words. The right-hand side may be a constant, a register pair, or a double
// stack slot.
template<class LabelType>
void InstructionCodeGeneratorX86::GenerateLongComparesAndJumps(HCondition* cond,
                                                               LabelType* true_label,
                                                               LabelType* false_label) {
  LocationSummary* locations = cond->GetLocations();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);
  IfCondition if_cond = cond->GetCondition();

  Register left_high = left.AsRegisterPairHigh<Register>();
  Register left_low = left.AsRegisterPairLow<Register>();
  IfCondition true_high_cond = if_cond;
  IfCondition false_high_cond = cond->GetOppositeCondition();
  Condition final_condition = X86UnsignedOrFPCondition(if_cond);  // unsigned on lower part

  // Set the conditions for the test, remembering that == needs to be
  // decided using the low words.
  switch (if_cond) {
    case kCondEQ:
    case kCondNE:
      // Nothing to do.
      break;
    case kCondLT:
      false_high_cond = kCondGT;
      break;
    case kCondLE:
      true_high_cond = kCondLT;
      break;
    case kCondGT:
      false_high_cond = kCondLT;
      break;
    case kCondGE:
      true_high_cond = kCondGT;
      break;
    case kCondB:
      false_high_cond = kCondA;
      break;
    case kCondBE:
      true_high_cond = kCondB;
      break;
    case kCondA:
      false_high_cond = kCondB;
      break;
    case kCondAE:
      true_high_cond = kCondA;
      break;
  }

  if (right.IsConstant()) {
    int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
    int32_t val_high = High32Bits(value);
    int32_t val_low = Low32Bits(value);

    codegen_->Compare32BitValue(left_high, val_high);
    // For == and != the high words only decide the "definitely unequal"
    // direction; otherwise both directions may be decided here.
    if (if_cond == kCondNE) {
      __ j(X86Condition(true_high_cond), true_label);
    } else if (if_cond == kCondEQ) {
      __ j(X86Condition(false_high_cond), false_label);
    } else {
      __ j(X86Condition(true_high_cond), true_label);
      __ j(X86Condition(false_high_cond), false_label);
    }
    // Must be equal high, so compare the lows.
    codegen_->Compare32BitValue(left_low, val_low);
  } else if (right.IsRegisterPair()) {
    Register right_high = right.AsRegisterPairHigh<Register>();
    Register right_low = right.AsRegisterPairLow<Register>();

    __ cmpl(left_high, right_high);
    if (if_cond == kCondNE) {
      __ j(X86Condition(true_high_cond), true_label);
    } else if (if_cond == kCondEQ) {
      __ j(X86Condition(false_high_cond), false_label);
    } else {
      __ j(X86Condition(true_high_cond), true_label);
      __ j(X86Condition(false_high_cond), false_label);
    }
    // Must be equal high, so compare the lows.
    __ cmpl(left_low, right_low);
  } else {
    DCHECK(right.IsDoubleStackSlot());
    __ cmpl(left_high, Address(ESP, right.GetHighStackIndex(kX86WordSize)));
    if (if_cond == kCondNE) {
      __ j(X86Condition(true_high_cond), true_label);
    } else if (if_cond == kCondEQ) {
      __ j(X86Condition(false_high_cond), false_label);
    } else {
      __ j(X86Condition(true_high_cond), true_label);
      __ j(X86Condition(false_high_cond), false_label);
    }
    // Must be equal high, so compare the lows.
    __ cmpl(left_low, Address(ESP, right.GetStackIndex()));
  }
  // The last comparison might be unsigned.
  __ j(final_condition, true_label);
}
2032
// Emits an unordered FP compare (ucomisd for doubles, ucomiss for floats) of
// `lhs` against `rhs`. The right-hand side may be an FP register, a constant
// emitted at its use site via the constant table, or a stack slot.
void InstructionCodeGeneratorX86::GenerateFPCompare(Location lhs,
                                                    Location rhs,
                                                    HInstruction* insn,
                                                    bool is_double) {
  HX86LoadFromConstantTable* const_area = insn->InputAt(1)->AsX86LoadFromConstantTableOrNull();
  if (is_double) {
    if (rhs.IsFpuRegister()) {
      __ ucomisd(lhs.AsFpuRegister<XmmRegister>(), rhs.AsFpuRegister<XmmRegister>());
    } else if (const_area != nullptr) {
      // Constant-table operand: address it relative to the method base.
      DCHECK(const_area->IsEmittedAtUseSite());
      __ ucomisd(lhs.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     const_area->GetConstant()->AsDoubleConstant()->GetValue(),
                     const_area->GetBaseMethodAddress(),
                     const_area->GetLocations()->InAt(0).AsRegister<Register>()));
    } else {
      DCHECK(rhs.IsDoubleStackSlot());
      __ ucomisd(lhs.AsFpuRegister<XmmRegister>(), Address(ESP, rhs.GetStackIndex()));
    }
  } else {
    if (rhs.IsFpuRegister()) {
      __ ucomiss(lhs.AsFpuRegister<XmmRegister>(), rhs.AsFpuRegister<XmmRegister>());
    } else if (const_area != nullptr) {
      DCHECK(const_area->IsEmittedAtUseSite());
      __ ucomiss(lhs.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     const_area->GetConstant()->AsFloatConstant()->GetValue(),
                     const_area->GetBaseMethodAddress(),
                     const_area->GetLocations()->InAt(0).AsRegister<Register>()));
    } else {
      DCHECK(rhs.IsStackSlot());
      __ ucomiss(lhs.AsFpuRegister<XmmRegister>(), Address(ESP, rhs.GetStackIndex()));
    }
  }
}
2068
// Emits a compare plus conditional branches for a long or FP HCondition that
// has been folded into its user (not materialized into a register).
template<class LabelType>
void InstructionCodeGeneratorX86::GenerateCompareTestAndBranch(HCondition* condition,
                                                               LabelType* true_target_in,
                                                               LabelType* false_target_in) {
  // Generated branching requires both targets to be explicit. If either of the
  // targets is nullptr (fallthrough) use and bind `fallthrough_target` instead.
  LabelType fallthrough_target;
  LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
  LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;

  LocationSummary* locations = condition->GetLocations();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  DataType::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    case DataType::Type::kInt64:
      GenerateLongComparesAndJumps(condition, true_target, false_target);
      break;
    case DataType::Type::kFloat32:
      GenerateFPCompare(left, right, condition, false);
      GenerateFPJumps(condition, true_target, false_target);
      break;
    case DataType::Type::kFloat64:
      GenerateFPCompare(left, right, condition, true);
      GenerateFPJumps(condition, true_target, false_target);
      break;
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }

  // The helpers above only branch on the true edge; complete the false edge
  // unless it is the fallthrough.
  if (false_target != &fallthrough_target) {
    __ jmp(false_target);
  }

  if (fallthrough_target.IsLinked()) {
    __ Bind(&fallthrough_target);
  }
}
2108
David Brazdil0debae72015-11-12 18:37:00 +00002109static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
2110 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
2111 // are set only strictly before `branch`. We can't use the eflags on long/FP
2112 // conditions if they are materialized due to the complex branching.
2113 return cond->IsCondition() &&
2114 cond->GetNext() == branch &&
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002115 cond->InputAt(0)->GetType() != DataType::Type::kInt64 &&
2116 !DataType::IsFloatingPointType(cond->InputAt(0)->GetType());
David Brazdil0debae72015-11-12 18:37:00 +00002117}
2118
// Emits the branch(es) for `instruction` based on its condition input at
// `condition_input_index`. A null target means that edge falls through to the
// next block and needs no jump.
template<class LabelType>
void InstructionCodeGeneratorX86::GenerateTestAndBranch(HInstruction* instruction,
                                                        size_t condition_input_index,
                                                        LabelType* true_target,
                                                        LabelType* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ jmp(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ jmp(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    if (AreEflagsSetFrom(cond, instruction)) {
      // The flags from the preceding condition are still valid; branch on
      // them directly without re-testing.
      if (true_target == nullptr) {
        __ j(X86Condition(cond->AsCondition()->GetOppositeCondition()), false_target);
      } else {
        __ j(X86Condition(cond->AsCondition()->GetCondition()), true_target);
      }
    } else {
      // Materialized condition, compare against 0.
      Location lhs = instruction->GetLocations()->InAt(condition_input_index);
      if (lhs.IsRegister()) {
        __ testl(lhs.AsRegister<Register>(), lhs.AsRegister<Register>());
      } else {
        __ cmpl(Address(ESP, lhs.GetStackIndex()), Immediate(0));
      }
      if (true_target == nullptr) {
        __ j(kEqual, false_target);
      } else {
        __ j(kNotEqual, true_target);
      }
    }
  } else {
    // Condition has not been materialized, use its inputs as the comparison and
    // its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    // If this is a long or FP comparison that has been folded into
    // the HCondition, generate the comparison directly.
    DataType::Type type = condition->InputAt(0)->GetType();
    if (type == DataType::Type::kInt64 || DataType::IsFloatingPointType(type)) {
      GenerateCompareTestAndBranch(condition, true_target, false_target);
      return;
    }

    Location lhs = condition->GetLocations()->InAt(0);
    Location rhs = condition->GetLocations()->InAt(1);
    // LHS is guaranteed to be in a register (see LocationsBuilderX86::HandleCondition).
    codegen_->GenerateIntCompare(lhs, rhs);
    if (true_target == nullptr) {
      __ j(X86Condition(condition->GetOppositeCondition()), false_target);
    } else {
      __ j(X86Condition(condition->GetCondition()), true_target);
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ jmp(false_target);
  }
}
2203
void LocationsBuilderX86::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
  // Only a plain boolean input or a materialized condition occupies an input
  // slot; a condition emitted at its use site is consumed directly.
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    locations->SetInAt(0, Location::Any());
  }
}
2210
2211void InstructionCodeGeneratorX86::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00002212 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
2213 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
2214 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
2215 nullptr : codegen_->GetLabelOf(true_successor);
2216 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
2217 nullptr : codegen_->GetLabelOf(false_successor);
Andreas Gampe3db70682018-12-26 15:12:03 -08002218 GenerateTestAndBranch(if_instr, /* condition_input_index= */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002219}
2220
void LocationsBuilderX86::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetAllocator())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  InvokeRuntimeCallingConvention calling_convention;
  // NOTE(review): the first runtime-call argument register is marked
  // caller-saved for the slow path — presumably it carries the deopt
  // argument; confirm against DeoptimizationSlowPathX86.
  RegisterSet caller_saves = RegisterSet::Empty();
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetCustomSlowPathCallerSaves(caller_saves);
  // Only a boolean input or a materialized condition occupies an input slot.
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::Any());
  }
}
2232
// Branches to the deoptimization slow path when the condition holds;
// otherwise falls through (false target is null).
void InstructionCodeGeneratorX86::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86>(deoptimize);
  GenerateTestAndBranch<Label>(deoptimize,
                               /* condition_input_index= */ 0,
                               slow_path->GetEntryLabel(),
                               /* false_target= */ nullptr);
}
2240
// The should-deoptimize flag is read into a register; no call is involved.
void LocationsBuilderX86::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  LocationSummary* locations = new (GetGraph()->GetAllocator())
      LocationSummary(flag, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}
2246
// Loads the should-deoptimize flag from its slot in the current stack frame.
void InstructionCodeGeneratorX86::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  __ movl(flag->GetLocations()->Out().AsRegister<Register>(),
          Address(ESP, codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
}
2251
Mark Mendell0c5b18e2016-02-06 13:58:35 -05002252static bool SelectCanUseCMOV(HSelect* select) {
2253 // There are no conditional move instructions for XMMs.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002254 if (DataType::IsFloatingPointType(select->GetType())) {
Mark Mendell0c5b18e2016-02-06 13:58:35 -05002255 return false;
2256 }
2257
2258 // A FP condition doesn't generate the single CC that we need.
2259 // In 32 bit mode, a long condition doesn't generate a single CC either.
2260 HInstruction* condition = select->GetCondition();
2261 if (condition->IsCondition()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002262 DataType::Type compare_type = condition->InputAt(0)->GetType();
2263 if (compare_type == DataType::Type::kInt64 ||
2264 DataType::IsFloatingPointType(compare_type)) {
Mark Mendell0c5b18e2016-02-06 13:58:35 -05002265 return false;
2266 }
2267 }
2268
2269 // We can generate a CMOV for this Select.
2270 return true;
2271}
2272
David Brazdil74eb1b22015-12-14 11:44:01 +00002273void LocationsBuilderX86::VisitSelect(HSelect* select) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01002274 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002275 if (DataType::IsFloatingPointType(select->GetType())) {
David Brazdil74eb1b22015-12-14 11:44:01 +00002276 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell0c5b18e2016-02-06 13:58:35 -05002277 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00002278 } else {
2279 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell0c5b18e2016-02-06 13:58:35 -05002280 if (SelectCanUseCMOV(select)) {
2281 if (select->InputAt(1)->IsConstant()) {
2282 // Cmov can't handle a constant value.
2283 locations->SetInAt(1, Location::RequiresRegister());
2284 } else {
2285 locations->SetInAt(1, Location::Any());
2286 }
2287 } else {
2288 locations->SetInAt(1, Location::Any());
2289 }
David Brazdil74eb1b22015-12-14 11:44:01 +00002290 }
Mark Mendell0c5b18e2016-02-06 13:58:35 -05002291 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
2292 locations->SetInAt(2, Location::RequiresRegister());
David Brazdil74eb1b22015-12-14 11:44:01 +00002293 }
2294 locations->SetOut(Location::SameAsFirstInput());
2295}
2296
// Code generation for HSelect. The output register already holds the "false"
// value (input 0, aliased with the output); when the condition is true it is
// overwritten with the "true" value (input 1), either via CMOV or via a
// branch around a move.
void InstructionCodeGeneratorX86::VisitSelect(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  DCHECK(locations->InAt(0).Equals(locations->Out()));
  if (SelectCanUseCMOV(select)) {
    // If both the condition and the source types are integer, we can generate
    // a CMOV to implement Select.

    HInstruction* select_condition = select->GetCondition();
    Condition cond = kNotEqual;

    // Figure out how to test the 'condition'.
    if (select_condition->IsCondition()) {
      HCondition* condition = select_condition->AsCondition();
      if (!condition->IsEmittedAtUseSite()) {
        // This was a previously materialized condition.
        // Can we use the existing condition code?
        if (AreEflagsSetFrom(condition, select)) {
          // Materialization was the previous instruction. Condition codes are right.
          cond = X86Condition(condition->GetCondition());
        } else {
          // No, we have to recreate the condition code.
          // Test the materialized Boolean (input 2); `cond` stays kNotEqual.
          Register cond_reg = locations->InAt(2).AsRegister<Register>();
          __ testl(cond_reg, cond_reg);
        }
      } else {
        // The condition is emitted at its use site: re-emit the compare here.
        // We can't handle FP or long here.
        DCHECK_NE(condition->InputAt(0)->GetType(), DataType::Type::kInt64);
        DCHECK(!DataType::IsFloatingPointType(condition->InputAt(0)->GetType()));
        LocationSummary* cond_locations = condition->GetLocations();
        codegen_->GenerateIntCompare(cond_locations->InAt(0), cond_locations->InAt(1));
        cond = X86Condition(condition->GetCondition());
      }
    } else {
      // Must be a Boolean condition, which needs to be compared to 0.
      Register cond_reg = locations->InAt(2).AsRegister<Register>();
      __ testl(cond_reg, cond_reg);
    }

    // If the condition is true, overwrite the output, which already contains false.
    Location false_loc = locations->InAt(0);
    Location true_loc = locations->InAt(1);
    if (select->GetType() == DataType::Type::kInt64) {
      // 64 bit conditional move: move both halves of the register pair.
      Register false_high = false_loc.AsRegisterPairHigh<Register>();
      Register false_low = false_loc.AsRegisterPairLow<Register>();
      if (true_loc.IsRegisterPair()) {
        __ cmovl(cond, false_high, true_loc.AsRegisterPairHigh<Register>());
        __ cmovl(cond, false_low, true_loc.AsRegisterPairLow<Register>());
      } else {
        // The "true" value lives on the stack.
        __ cmovl(cond, false_high, Address(ESP, true_loc.GetHighStackIndex(kX86WordSize)));
        __ cmovl(cond, false_low, Address(ESP, true_loc.GetStackIndex()));
      }
    } else {
      // 32 bit conditional move.
      Register false_reg = false_loc.AsRegister<Register>();
      if (true_loc.IsRegister()) {
        __ cmovl(cond, false_reg, true_loc.AsRegister<Register>());
      } else {
        __ cmovl(cond, false_reg, Address(ESP, true_loc.GetStackIndex()));
      }
    }
  } else {
    // No CMOV possible: branch over the move of the "true" value when the
    // condition is false.
    NearLabel false_target;
    GenerateTestAndBranch<NearLabel>(
        select, /* condition_input_index= */ 2, /* true_target= */ nullptr, &false_target);
    codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
    __ Bind(&false_target);
  }
}
2366
// An HNop needs a LocationSummary (with no register constraints) but emits
// no code.
void LocationsBuilderX86::VisitNop(HNop* nop) {
  new (GetGraph()->GetAllocator()) LocationSummary(nop);
}
2370
// Intentionally emits nothing.
void InstructionCodeGeneratorX86::VisitNop(HNop*) {
  // The environment recording already happened in CodeGenerator::Compile.
}
2374
// Grows the stack frame by `adjustment` bytes and keeps the CFI
// (call frame information) CFA offset in sync for unwinding.
void CodeGeneratorX86::IncreaseFrame(size_t adjustment) {
  __ subl(ESP, Immediate(adjustment));
  __ cfi().AdjustCFAOffset(adjustment);
}
2379
// Shrinks the stack frame by `adjustment` bytes, mirroring IncreaseFrame,
// and keeps the CFI CFA offset in sync.
void CodeGeneratorX86::DecreaseFrame(size_t adjustment) {
  __ addl(ESP, Immediate(adjustment));
  __ cfi().AdjustCFAOffset(-adjustment);
}
2384
// Emits a single x86 `nop` instruction.
void CodeGeneratorX86::GenerateNop() {
  __ nop();
}
2388
// Register allocation shared by all comparison instructions (HEqual,
// HLessThan, ...). Constraints depend on the type of the compared inputs.
void LocationsBuilderX86::HandleCondition(HCondition* cond) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(cond, LocationSummary::kNoCall);
  // Handle the long/FP comparisons made in instruction simplification.
  switch (cond->InputAt(0)->GetType()) {
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      // No output needed when the condition is consumed directly at its use.
      if (!cond->IsEmittedAtUseSite()) {
        locations->SetOut(Location::RequiresRegister());
      }
      break;
    }
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      if (cond->InputAt(1)->IsX86LoadFromConstantTable()) {
        // The constant-table load is folded into the compare at the use site.
        DCHECK(cond->InputAt(1)->IsEmittedAtUseSite());
      } else if (cond->InputAt(1)->IsConstant()) {
        locations->SetInAt(1, Location::RequiresFpuRegister());
      } else {
        locations->SetInAt(1, Location::Any());
      }
      if (!cond->IsEmittedAtUseSite()) {
        locations->SetOut(Location::RequiresRegister());
      }
      break;
    }
    default:
      // Integer (and reference) comparisons.
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      if (!cond->IsEmittedAtUseSite()) {
        // We need a byte register: `setb` writes only a byte-addressable
        // register, so the output is pinned to ECX (low byte CL).
        locations->SetOut(Location::RegisterLocation(ECX));
      }
      break;
  }
}
2427
// Code generation shared by all comparison instructions: materializes the
// condition as 0/1 in the output register. Integer compares use `setb`;
// long and FP compares go through jump sequences that are then converted
// into the 0/1 result.
void InstructionCodeGeneratorX86::HandleCondition(HCondition* cond) {
  if (cond->IsEmittedAtUseSite()) {
    // The consumer (e.g. an HIf or HSelect) emits the compare itself.
    return;
  }

  LocationSummary* locations = cond->GetLocations();
  Location lhs = locations->InAt(0);
  Location rhs = locations->InAt(1);
  Register reg = locations->Out().AsRegister<Register>();
  NearLabel true_label, false_label;

  switch (cond->InputAt(0)->GetType()) {
    default: {
      // Integer case.

      // Clear output register: setb only sets the low byte.
      // Note: xorl before the compare so EFLAGS are still valid for setb.
      __ xorl(reg, reg);
      codegen_->GenerateIntCompare(lhs, rhs);
      __ setb(X86Condition(cond->GetCondition()), reg);
      return;
    }
    case DataType::Type::kInt64:
      GenerateLongComparesAndJumps(cond, &true_label, &false_label);
      break;
    case DataType::Type::kFloat32:
      GenerateFPCompare(lhs, rhs, cond, false);
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    case DataType::Type::kFloat64:
      GenerateFPCompare(lhs, rhs, cond, true);
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
  }

  // Convert the jumps into the result.
  NearLabel done_label;

  // False case: result = 0.
  __ Bind(&false_label);
  __ xorl(reg, reg);
  __ jmp(&done_label);

  // True case: result = 1.
  __ Bind(&true_label);
  __ movl(reg, Immediate(1));
  __ Bind(&done_label);
}
2475
// All comparison instructions (HEqual through HAboveOrEqual) share identical
// location setup and code generation; each visitor simply forwards to
// HandleCondition above.
void LocationsBuilderX86::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
2555
// Constant instructions produce no code: their location summaries mark the
// output as a constant location, and each use site materializes the value.
void LocationsBuilderX86::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86::VisitIntConstant([[maybe_unused]] HIntConstant* constant) {
  // Will be generated at use site.
}

void LocationsBuilderX86::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86::VisitNullConstant([[maybe_unused]] HNullConstant* constant) {
  // Will be generated at use site.
}

void LocationsBuilderX86::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86::VisitLongConstant([[maybe_unused]] HLongConstant* constant) {
  // Will be generated at use site.
}

void LocationsBuilderX86::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86::VisitFloatConstant([[maybe_unused]] HFloatConstant* constant) {
  // Will be generated at use site.
}

void LocationsBuilderX86::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86::VisitDoubleConstant([[maybe_unused]] HDoubleConstant* constant) {
  // Will be generated at use site.
}
2605
// An HConstructorFence uses no registers; it lowers to a store-store barrier.
void LocationsBuilderX86::VisitConstructorFence(HConstructorFence* constructor_fence) {
  constructor_fence->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitConstructorFence(
    [[maybe_unused]] HConstructorFence* constructor_fence) {
  codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}
2614
// An HMemoryBarrier uses no registers; it lowers directly to the barrier of
// the requested kind.
void LocationsBuilderX86::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
2622
// A void return needs no locations; it only tears down the frame.
void LocationsBuilderX86::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitReturnVoid([[maybe_unused]] HReturnVoid* ret) {
  codegen_->GenerateFrameExit();
}
2630
// Pins the returned value (input 0) to the calling-convention return
// location via the shared SetInForReturnValue helper.
void LocationsBuilderX86::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(ret, LocationSummary::kNoCall);
  SetInForReturnValue(ret, locations);
}
2636
// Verifies (in debug builds) that the return value already sits in the
// calling-convention return register(s), copies FP results into core
// registers for OSR methods, then tears down the frame.
void InstructionCodeGeneratorX86::VisitReturn(HReturn* ret) {
  switch (ret->InputAt(0)->GetType()) {
    case DataType::Type::kReference:
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
      // 32-bit (and narrower) values are returned in EAX.
      DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<Register>(), EAX);
      break;

    case DataType::Type::kInt64:
      // 64-bit values are returned in the EDX:EAX pair.
      DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegisterPairLow<Register>(), EAX);
      DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegisterPairHigh<Register>(), EDX);
      break;

    case DataType::Type::kFloat32:
      DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>(), XMM0);
      if (GetGraph()->IsCompilingOsr()) {
        // To simplify callers of an OSR method, we put the return value in both
        // floating point and core registers.
        __ movd(EAX, XMM0);
      }
      break;

    case DataType::Type::kFloat64:
      DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>(), XMM0);
      if (GetGraph()->IsCompilingOsr()) {
        // To simplify callers of an OSR method, we put the return value in both
        // floating point and core registers.
        __ movd(EAX, XMM0);
        // Use XMM1 as temporary register to not clobber XMM0.
        __ movaps(XMM1, XMM0);
        __ psrlq(XMM1, Immediate(32));
        __ movd(EDX, XMM1);
      }
      break;

    default:
      LOG(FATAL) << "Unknown return type " << ret->InputAt(0)->GetType();
  }
  codegen_->GenerateFrameExit();
}
2681
// Invokes whose target could not be resolved at compile time go through a
// runtime trampoline.
void LocationsBuilderX86::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorX86::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
2692
// Register allocation for static/direct invokes. Tries the intrinsic path
// first; otherwise falls back to the common (or @CriticalNative) calling
// convention setup.
void LocationsBuilderX86::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  IntrinsicLocationsBuilderX86 intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    // If the intrinsic may still make a call and uses a PC-relative load,
    // make sure the special input (address base) has some location.
    if (invoke->GetLocations()->CanCall() &&
        invoke->HasPcRelativeMethodLoadKind() &&
        invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex()).IsInvalid()) {
      invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::Any());
    }
    return;
  }

  if (invoke->GetCodePtrLocation() == CodePtrLocation::kCallCriticalNative) {
    // @CriticalNative calls use the native (not dex) calling convention.
    CriticalNativeCallingConventionVisitorX86 calling_convention_visitor(
        /*for_register_allocation=*/ true);
    CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
  } else {
    HandleInvoke(invoke);
  }

  // For PC-relative load kinds the invoke has an extra input, the PC-relative address base.
  if (invoke->HasPcRelativeMethodLoadKind()) {
    invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::RequiresRegister());
  }
}
2721
Mark Mendell09ed1a32015-03-25 08:30:06 -04002722static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86* codegen) {
2723 if (invoke->GetLocations()->Intrinsified()) {
2724 IntrinsicCodeGeneratorX86 intrinsic(codegen);
2725 intrinsic.Dispatch(invoke);
2726 return true;
2727 }
2728 return false;
2729}
2730
// Code generation for static/direct invokes: intrinsic code when available,
// otherwise the regular static-or-direct call sequence.
void InstructionCodeGeneratorX86::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  LocationSummary* locations = invoke->GetLocations();
  // The first temp (if any) is passed to the call generator.
  codegen_->GenerateStaticOrDirectCall(
      invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
}
2744
// Register allocation for virtual invokes: intrinsic path first, otherwise
// the common invoke setup plus, for JIT baseline compilation, a temp used by
// the inline cache update.
void LocationsBuilderX86::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  IntrinsicLocationsBuilderX86 intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);

  if (GetGraph()->IsCompilingBaseline() && !Runtime::Current()->IsAotCompiler()) {
    // Add one temporary for inline cache update.
    invoke->GetLocations()->AddTemp(Location::RegisterLocation(EBP));
  }
}
2758
// Common invoke location setup: applies the x86 dex calling convention to
// the invoke's inputs and output.
void LocationsBuilderX86::HandleInvoke(HInvoke* invoke) {
  InvokeDexCallingConventionVisitorX86 calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}
2763
// Code generation for virtual invokes: intrinsic code when available,
// otherwise a virtual dispatch through the first temp register.
void InstructionCodeGeneratorX86::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
  DCHECK(!codegen_->IsLeafMethod());
}
2772
// Register allocation for interface invokes. Interface calls carry a hidden
// argument (the interface method) in XMM7, plus optional extra inputs/temps
// for PC-relative loads and JIT baseline inline caches.
void LocationsBuilderX86::VisitInvokeInterface(HInvokeInterface* invoke) {
  // This call to HandleInvoke allocates a temporary (core) register
  // which is also used to transfer the hidden argument from FP to
  // core register.
  HandleInvoke(invoke);
  // Add the hidden argument.
  invoke->GetLocations()->AddTemp(Location::FpuRegisterLocation(XMM7));

  if (GetGraph()->IsCompilingBaseline() && !Runtime::Current()->IsAotCompiler()) {
    // Add one temporary for inline cache update.
    invoke->GetLocations()->AddTemp(Location::RegisterLocation(EBP));
  }

  // For PC-relative load kinds the invoke has an extra input, the PC-relative address base.
  if (IsPcRelativeMethodLoadKind(invoke->GetHiddenArgumentLoadKind())) {
    invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::RequiresRegister());
  }

  // A recursive load kind passes the hidden argument as the last regular input.
  if (invoke->GetHiddenArgumentLoadKind() == MethodLoadKind::kRecursive) {
    invoke->GetLocations()->SetInAt(invoke->GetNumberOfArguments() - 1,
                                    Location::RequiresRegister());
  }
}
2796
// For JIT baseline compilation, emits the inline-cache update for a virtual
// or interface call: a fast-path compare against the cache's recorded class
// (monomorphic case), with a runtime call to update the cache on mismatch.
// `klass` must be EAX and must hold the receiver's class.
void CodeGeneratorX86::MaybeGenerateInlineCacheCheck(HInstruction* instruction, Register klass) {
  DCHECK_EQ(EAX, klass);
  // We know the destination of an intrinsic, so no need to record inline
  // caches (also the intrinsic location builder doesn't request an additional
  // temporary).
  if (!instruction->GetLocations()->Intrinsified() &&
      GetGraph()->IsCompilingBaseline() &&
      !Runtime::Current()->IsAotCompiler()) {
    DCHECK(!instruction->GetEnvironment()->IsFromInlinedInvoke());
    ProfilingInfo* info = GetGraph()->GetProfilingInfo();
    DCHECK(info != nullptr);
    InlineCache* cache = info->GetInlineCache(instruction->GetDexPc());
    uint32_t address = reinterpret_cast32<uint32_t>(cache);
    if (kIsDebugBuild) {
      // The last temp was reserved for this update (see the invoke builders).
      uint32_t temp_index = instruction->GetLocations()->GetTempCount() - 1u;
      CHECK_EQ(EBP, instruction->GetLocations()->GetTemp(temp_index).AsRegister<Register>());
    }
    Register temp = EBP;
    NearLabel done;
    __ movl(temp, Immediate(address));
    // Fast path for a monomorphic cache.
    __ cmpl(klass, Address(temp, InlineCache::ClassesOffset().Int32Value()));
    __ j(kEqual, &done);
    // Cache miss: call the runtime to update the inline cache.
    GenerateInvokeRuntime(GetThreadOffset<kX86PointerSize>(kQuickUpdateInlineCache).Int32Value());
    __ Bind(&done);
  }
}
2824
// Generates code for an interface call: loads the receiver's class, walks the
// IMT (interface method table) to find the target ArtMethod, passes the
// interface method as a "hidden argument" in XMM7, and calls the entrypoint.
void InstructionCodeGeneratorX86::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  Register temp = locations->GetTemp(0).AsRegister<Register>();
  XmmRegister hidden_reg = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Set the hidden argument. This is safe to do this here, as XMM7
  // won't be modified thereafter, before the `call` instruction.
  DCHECK_EQ(XMM7, hidden_reg);
  if (invoke->GetHiddenArgumentLoadKind() == MethodLoadKind::kRecursive) {
    // The caller already has the interface method; it was passed as the last
    // "argument" input of the invoke.
    __ movd(hidden_reg, locations->InAt(invoke->GetNumberOfArguments() - 1).AsRegister<Register>());
  } else if (invoke->GetHiddenArgumentLoadKind() != MethodLoadKind::kRuntimeCall) {
    // Materialize the interface method into `temp`, then stash it in XMM7.
    // (`temp` is reloaded with the receiver's class right below.)
    codegen_->LoadMethod(invoke->GetHiddenArgumentLoadKind(), locations->GetTemp(0), invoke);
    __ movd(hidden_reg, temp);
  }

  if (receiver.IsStackSlot()) {
    __ movl(temp, Address(ESP, receiver.GetStackIndex()));
    // /* HeapReference<Class> */ temp = temp->klass_
    __ movl(temp, Address(temp, class_offset));
  } else {
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ movl(temp, Address(receiver.AsRegister<Register>(), class_offset));
  }
  // Recorded right after the class load so a fault on that load maps to this
  // invoke's dex pc (implicit null check on the receiver).
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);

  // Baseline-compilation inline cache update (no-op otherwise); expects the
  // receiver's class in `temp` (EAX).
  codegen_->MaybeGenerateInlineCacheCheck(invoke, temp);

  // temp = temp->GetAddressOfIMT()
  __ movl(temp,
      Address(temp, mirror::Class::ImtPtrOffset(kX86PointerSize).Uint32Value()));
  // temp = temp->GetImtEntryAt(method_offset);
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kX86PointerSize));
  __ movl(temp, Address(temp, method_offset));
  if (invoke->GetHiddenArgumentLoadKind() == MethodLoadKind::kRuntimeCall) {
    // We pass the method from the IMT in case of a conflict. This will ensure
    // we go into the runtime to resolve the actual method.
    __ movd(hidden_reg, temp);
  }
  // call temp->GetEntryPoint();
  __ call(Address(temp,
                  ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86PointerSize).Int32Value()));

  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
2882
Orion Hodsonac141392017-01-13 11:53:47 +00002883void LocationsBuilderX86::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
Andra Danciua0130e82020-07-23 12:34:56 +00002884 IntrinsicLocationsBuilderX86 intrinsic(codegen_);
2885 if (intrinsic.TryDispatch(invoke)) {
2886 return;
2887 }
Orion Hodsonac141392017-01-13 11:53:47 +00002888 HandleInvoke(invoke);
2889}
2890
2891void InstructionCodeGeneratorX86::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
Andra Danciua0130e82020-07-23 12:34:56 +00002892 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2893 return;
2894 }
Orion Hodsonac141392017-01-13 11:53:47 +00002895 codegen_->GenerateInvokePolymorphicCall(invoke);
2896}
2897
// invoke-custom has no intrinsic or special-case handling on x86;
// use the generic invoke location setup.
void LocationsBuilderX86::VisitInvokeCustom(HInvokeCustom* invoke) {
  HandleInvoke(invoke);
}
2901
// Code generation for invoke-custom is architecture-independent;
// delegate to the shared helper on the codegen.
void InstructionCodeGeneratorX86::VisitInvokeCustom(HInvokeCustom* invoke) {
  codegen_->GenerateInvokeCustomCall(invoke);
}
2905
Roland Levillain88cb1752014-10-20 16:36:47 +01002906void LocationsBuilderX86::VisitNeg(HNeg* neg) {
2907 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002908 new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
Roland Levillain88cb1752014-10-20 16:36:47 +01002909 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002910 case DataType::Type::kInt32:
2911 case DataType::Type::kInt64:
Roland Levillain88cb1752014-10-20 16:36:47 +01002912 locations->SetInAt(0, Location::RequiresRegister());
2913 locations->SetOut(Location::SameAsFirstInput());
2914 break;
2915
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002916 case DataType::Type::kFloat32:
Roland Levillain5368c212014-11-27 15:03:41 +00002917 locations->SetInAt(0, Location::RequiresFpuRegister());
2918 locations->SetOut(Location::SameAsFirstInput());
2919 locations->AddTemp(Location::RequiresRegister());
2920 locations->AddTemp(Location::RequiresFpuRegister());
2921 break;
2922
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002923 case DataType::Type::kFloat64:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002924 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002925 locations->SetOut(Location::SameAsFirstInput());
2926 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002927 break;
2928
2929 default:
2930 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2931 }
2932}
2933
// Generates code for arithmetic negation. Integer negation uses NEG (with a
// carry-propagating sequence for 64-bit pairs); floating-point negation
// flips the sign bit with an XOR against a sign mask.
void InstructionCodeGeneratorX86::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case DataType::Type::kInt32:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));  // In-place operation (see locations builder).
      __ negl(out.AsRegister<Register>());
      break;

    case DataType::Type::kInt64:
      DCHECK(in.IsRegisterPair());
      DCHECK(in.Equals(out));
      __ negl(out.AsRegisterPairLow<Register>());
      // Negation is similar to subtraction from zero. The least
      // significant byte triggers a borrow when it is different from
      // zero; to take it into account, add 1 to the most significant
      // byte if the carry flag (CF) is set to 1 after the first NEGL
      // operation.
      __ adcl(out.AsRegisterPairHigh<Register>(), Immediate(0));
      __ negl(out.AsRegisterPairHigh<Register>());
      break;

    case DataType::Type::kFloat32: {
      DCHECK(in.Equals(out));
      // GPR temp used to materialize the mask before moving it to XMM.
      Register constant = locations->GetTemp(0).AsRegister<Register>();
      XmmRegister mask = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
      // Implement float negation with an exclusive or with value
      // 0x80000000 (mask for bit 31, representing the sign of a
      // single-precision floating-point number).
      __ movl(constant, Immediate(INT32_C(0x80000000)));
      __ movd(mask, constant);
      __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    case DataType::Type::kFloat64: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement double negation with an exclusive or with value
      // 0x8000000000000000 (mask for bit 63, representing the sign of
      // a double-precision floating-point number).
      __ LoadLongConstant(mask, INT64_C(0x8000000000000000));
      __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
2986
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00002987void LocationsBuilderX86::VisitX86FPNeg(HX86FPNeg* neg) {
2988 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002989 new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002990 DCHECK(DataType::IsFloatingPointType(neg->GetType()));
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00002991 locations->SetInAt(0, Location::RequiresFpuRegister());
2992 locations->SetInAt(1, Location::RequiresRegister());
2993 locations->SetOut(Location::SameAsFirstInput());
2994 locations->AddTemp(Location::RequiresFpuRegister());
2995}
2996
2997void InstructionCodeGeneratorX86::VisitX86FPNeg(HX86FPNeg* neg) {
2998 LocationSummary* locations = neg->GetLocations();
2999 Location out = locations->Out();
3000 DCHECK(locations->InAt(0).Equals(out));
3001
3002 Register constant_area = locations->InAt(1).AsRegister<Register>();
3003 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003004 if (neg->GetType() == DataType::Type::kFloat32) {
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003005 __ movss(mask, codegen_->LiteralInt32Address(INT32_C(0x80000000),
3006 neg->GetBaseMethodAddress(),
3007 constant_area));
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00003008 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
3009 } else {
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003010 __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000),
3011 neg->GetBaseMethodAddress(),
3012 constant_area));
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00003013 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
3014 }
3015}
3016
// Sets up register constraints for every explicit primitive type conversion.
// Outer switch is on the result type, inner switch on the input type.
// float/double -> long is the only conversion lowered as a runtime call.
void LocationsBuilderX86::VisitTypeConversion(HTypeConversion* conversion) {
  DataType::Type result_type = conversion->GetResultType();
  DataType::Type input_type = conversion->GetInputType();
  DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
      << input_type << " -> " << result_type;

  // The float-to-long and double-to-long type conversions rely on a
  // call to the runtime.
  LocationSummary::CallKind call_kind =
      ((input_type == DataType::Type::kFloat32 || input_type == DataType::Type::kFloat64)
       && result_type == DataType::Type::kInt64)
      ? LocationSummary::kCallOnMainOnly
      : LocationSummary::kNoCall;
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(conversion, call_kind);

  switch (result_type) {
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
      switch (input_type) {
        case DataType::Type::kUint8:
        case DataType::Type::kInt8:
        case DataType::Type::kUint16:
        case DataType::Type::kInt16:
        case DataType::Type::kInt32:
          // Byte moves need a byte-addressable register (EAX..EDX); ECX is
          // used as the fixed fallback.
          locations->SetInAt(0, Location::ByteRegisterOrConstant(ECX, conversion->InputAt(0)));
          // Make the output overlap to please the register allocator. This greatly simplifies
          // the validation of the linear scan implementation
          locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
          break;
        case DataType::Type::kInt64: {
          HInstruction* input = conversion->InputAt(0);
          Location input_location = input->IsConstant()
              ? Location::ConstantLocation(input)
              : Location::RegisterPairLocation(EAX, EDX);
          locations->SetInAt(0, input_location);
          // Make the output overlap to please the register allocator. This greatly simplifies
          // the validation of the linear scan implementation
          locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
          break;
        }

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
      DCHECK(DataType::IsIntegralType(input_type)) << input_type;
      locations->SetInAt(0, Location::Any());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case DataType::Type::kInt32:
      switch (input_type) {
        case DataType::Type::kInt64:
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        case DataType::Type::kFloat32:
          // FP temp used for the NaN/overflow-clamping comparison sequence.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          break;

        case DataType::Type::kFloat64:
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case DataType::Type::kInt64:
      switch (input_type) {
        case DataType::Type::kBool:
        case DataType::Type::kUint8:
        case DataType::Type::kInt8:
        case DataType::Type::kUint16:
        case DataType::Type::kInt16:
        case DataType::Type::kInt32:
          // int-to-long is emitted as CDQ, which is hard-wired to EAX:EDX.
          locations->SetInAt(0, Location::RegisterLocation(EAX));
          locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
          break;

        case DataType::Type::kFloat32:
        case DataType::Type::kFloat64: {
          // Runtime call (kQuickF2l / kQuickD2l); argument goes in the first
          // FP argument register of the runtime calling convention.
          InvokeRuntimeCallingConvention calling_convention;
          XmmRegister parameter = calling_convention.GetFpuRegisterAt(0);
          locations->SetInAt(0, Location::FpuRegisterLocation(parameter));

          // The runtime helper puts the result in EAX, EDX.
          locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
        }
        break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case DataType::Type::kFloat32:
      switch (input_type) {
        case DataType::Type::kBool:
        case DataType::Type::kUint8:
        case DataType::Type::kInt8:
        case DataType::Type::kUint16:
        case DataType::Type::kInt16:
        case DataType::Type::kInt32:
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case DataType::Type::kInt64:
          // long-to-float goes through the x87 stack / memory, so any
          // location works for both input and output.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::Any());
          break;

        case DataType::Type::kFloat64:
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case DataType::Type::kFloat64:
      switch (input_type) {
        case DataType::Type::kBool:
        case DataType::Type::kUint8:
        case DataType::Type::kInt8:
        case DataType::Type::kUint16:
        case DataType::Type::kInt16:
        case DataType::Type::kInt32:
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case DataType::Type::kInt64:
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::Any());
          break;

        case DataType::Type::kFloat32:
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
3187
3188void InstructionCodeGeneratorX86::VisitTypeConversion(HTypeConversion* conversion) {
3189 LocationSummary* locations = conversion->GetLocations();
3190 Location out = locations->Out();
3191 Location in = locations->InAt(0);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003192 DataType::Type result_type = conversion->GetResultType();
3193 DataType::Type input_type = conversion->GetInputType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003194 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
3195 << input_type << " -> " << result_type;
Roland Levillaindff1f282014-11-05 14:15:05 +00003196 switch (result_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003197 case DataType::Type::kUint8:
3198 switch (input_type) {
3199 case DataType::Type::kInt8:
3200 case DataType::Type::kUint16:
3201 case DataType::Type::kInt16:
3202 case DataType::Type::kInt32:
3203 if (in.IsRegister()) {
3204 __ movzxb(out.AsRegister<Register>(), in.AsRegister<ByteRegister>());
3205 } else {
3206 DCHECK(in.GetConstant()->IsIntConstant());
Vladimir Markocde64972023-04-25 16:40:06 +00003207 int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003208 __ movl(out.AsRegister<Register>(), Immediate(static_cast<uint8_t>(value)));
3209 }
3210 break;
3211 case DataType::Type::kInt64:
3212 if (in.IsRegisterPair()) {
3213 __ movzxb(out.AsRegister<Register>(), in.AsRegisterPairLow<ByteRegister>());
3214 } else {
3215 DCHECK(in.GetConstant()->IsLongConstant());
Vladimir Markocde64972023-04-25 16:40:06 +00003216 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003217 __ movl(out.AsRegister<Register>(), Immediate(static_cast<uint8_t>(value)));
3218 }
3219 break;
3220
3221 default:
3222 LOG(FATAL) << "Unexpected type conversion from " << input_type
3223 << " to " << result_type;
3224 }
3225 break;
3226
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003227 case DataType::Type::kInt8:
Roland Levillain51d3fc42014-11-13 14:11:42 +00003228 switch (input_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003229 case DataType::Type::kUint8:
3230 case DataType::Type::kUint16:
3231 case DataType::Type::kInt16:
3232 case DataType::Type::kInt32:
3233 if (in.IsRegister()) {
3234 __ movsxb(out.AsRegister<Register>(), in.AsRegister<ByteRegister>());
3235 } else {
3236 DCHECK(in.GetConstant()->IsIntConstant());
Vladimir Markocde64972023-04-25 16:40:06 +00003237 int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003238 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int8_t>(value)));
3239 }
3240 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003241 case DataType::Type::kInt64:
Vladimir Markob52bbde2016-02-12 12:06:05 +00003242 if (in.IsRegisterPair()) {
3243 __ movsxb(out.AsRegister<Register>(), in.AsRegisterPairLow<ByteRegister>());
3244 } else {
3245 DCHECK(in.GetConstant()->IsLongConstant());
Vladimir Markocde64972023-04-25 16:40:06 +00003246 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Vladimir Markob52bbde2016-02-12 12:06:05 +00003247 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int8_t>(value)));
3248 }
3249 break;
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003250
3251 default:
3252 LOG(FATAL) << "Unexpected type conversion from " << input_type
3253 << " to " << result_type;
3254 }
3255 break;
3256
3257 case DataType::Type::kUint16:
3258 switch (input_type) {
3259 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003260 case DataType::Type::kInt16:
3261 case DataType::Type::kInt32:
Nicolas Geoffray5b4b8982014-12-18 17:45:56 +00003262 if (in.IsRegister()) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003263 __ movzxw(out.AsRegister<Register>(), in.AsRegister<Register>());
3264 } else if (in.IsStackSlot()) {
3265 __ movzxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
Nicolas Geoffray5b4b8982014-12-18 17:45:56 +00003266 } else {
3267 DCHECK(in.GetConstant()->IsIntConstant());
Vladimir Markocde64972023-04-25 16:40:06 +00003268 int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003269 __ movl(out.AsRegister<Register>(), Immediate(static_cast<uint16_t>(value)));
3270 }
3271 break;
3272 case DataType::Type::kInt64:
3273 if (in.IsRegisterPair()) {
3274 __ movzxw(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
3275 } else if (in.IsDoubleStackSlot()) {
3276 __ movzxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
3277 } else {
3278 DCHECK(in.GetConstant()->IsLongConstant());
Vladimir Markocde64972023-04-25 16:40:06 +00003279 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003280 __ movl(out.AsRegister<Register>(), Immediate(static_cast<uint16_t>(value)));
Nicolas Geoffray5b4b8982014-12-18 17:45:56 +00003281 }
Roland Levillain51d3fc42014-11-13 14:11:42 +00003282 break;
3283
3284 default:
3285 LOG(FATAL) << "Unexpected type conversion from " << input_type
3286 << " to " << result_type;
3287 }
3288 break;
3289
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003290 case DataType::Type::kInt16:
Roland Levillain01a8d712014-11-14 16:27:39 +00003291 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003292 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003293 case DataType::Type::kInt32:
Roland Levillain01a8d712014-11-14 16:27:39 +00003294 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003295 __ movsxw(out.AsRegister<Register>(), in.AsRegister<Register>());
Roland Levillain01a8d712014-11-14 16:27:39 +00003296 } else if (in.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003297 __ movsxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
Roland Levillain01a8d712014-11-14 16:27:39 +00003298 } else {
3299 DCHECK(in.GetConstant()->IsIntConstant());
Vladimir Markocde64972023-04-25 16:40:06 +00003300 int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003301 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int16_t>(value)));
Roland Levillain01a8d712014-11-14 16:27:39 +00003302 }
3303 break;
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003304 case DataType::Type::kInt64:
3305 if (in.IsRegisterPair()) {
3306 __ movsxw(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
3307 } else if (in.IsDoubleStackSlot()) {
3308 __ movsxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
3309 } else {
3310 DCHECK(in.GetConstant()->IsLongConstant());
Vladimir Markocde64972023-04-25 16:40:06 +00003311 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003312 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int16_t>(value)));
3313 }
3314 break;
Roland Levillain01a8d712014-11-14 16:27:39 +00003315
3316 default:
3317 LOG(FATAL) << "Unexpected type conversion from " << input_type
3318 << " to " << result_type;
3319 }
3320 break;
3321
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003322 case DataType::Type::kInt32:
Roland Levillain946e1432014-11-11 17:35:19 +00003323 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003324 case DataType::Type::kInt64:
Roland Levillain946e1432014-11-11 17:35:19 +00003325 if (in.IsRegisterPair()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003326 __ movl(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
Roland Levillain946e1432014-11-11 17:35:19 +00003327 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003328 __ movl(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
Roland Levillain946e1432014-11-11 17:35:19 +00003329 } else {
3330 DCHECK(in.IsConstant());
3331 DCHECK(in.GetConstant()->IsLongConstant());
Vladimir Markocde64972023-04-25 16:40:06 +00003332 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003333 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00003334 }
3335 break;
3336
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003337 case DataType::Type::kFloat32: {
Roland Levillain3f8f9362014-12-02 17:45:01 +00003338 XmmRegister input = in.AsFpuRegister<XmmRegister>();
3339 Register output = out.AsRegister<Register>();
3340 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04003341 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00003342
3343 __ movl(output, Immediate(kPrimIntMax));
3344 // temp = int-to-float(output)
3345 __ cvtsi2ss(temp, output);
3346 // if input >= temp goto done
3347 __ comiss(input, temp);
3348 __ j(kAboveEqual, &done);
3349 // if input == NaN goto nan
3350 __ j(kUnordered, &nan);
3351 // output = float-to-int-truncate(input)
3352 __ cvttss2si(output, input);
3353 __ jmp(&done);
3354 __ Bind(&nan);
3355 // output = 0
3356 __ xorl(output, output);
3357 __ Bind(&done);
3358 break;
3359 }
3360
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003361 case DataType::Type::kFloat64: {
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003362 XmmRegister input = in.AsFpuRegister<XmmRegister>();
3363 Register output = out.AsRegister<Register>();
3364 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04003365 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003366
3367 __ movl(output, Immediate(kPrimIntMax));
3368 // temp = int-to-double(output)
3369 __ cvtsi2sd(temp, output);
3370 // if input >= temp goto done
3371 __ comisd(input, temp);
3372 __ j(kAboveEqual, &done);
3373 // if input == NaN goto nan
3374 __ j(kUnordered, &nan);
3375 // output = double-to-int-truncate(input)
3376 __ cvttsd2si(output, input);
3377 __ jmp(&done);
3378 __ Bind(&nan);
3379 // output = 0
3380 __ xorl(output, output);
3381 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00003382 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003383 }
Roland Levillain946e1432014-11-11 17:35:19 +00003384
3385 default:
3386 LOG(FATAL) << "Unexpected type conversion from " << input_type
3387 << " to " << result_type;
3388 }
3389 break;
3390
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003391 case DataType::Type::kInt64:
Roland Levillaindff1f282014-11-05 14:15:05 +00003392 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003393 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003394 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003395 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003396 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003397 case DataType::Type::kInt16:
3398 case DataType::Type::kInt32:
Roland Levillaindff1f282014-11-05 14:15:05 +00003399 DCHECK_EQ(out.AsRegisterPairLow<Register>(), EAX);
3400 DCHECK_EQ(out.AsRegisterPairHigh<Register>(), EDX);
Roland Levillain271ab9c2014-11-27 15:23:57 +00003401 DCHECK_EQ(in.AsRegister<Register>(), EAX);
Roland Levillaindff1f282014-11-05 14:15:05 +00003402 __ cdq();
3403 break;
3404
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003405 case DataType::Type::kFloat32:
Serban Constantinescuba45db02016-07-12 22:53:02 +01003406 codegen_->InvokeRuntime(kQuickF2l, conversion, conversion->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00003407 CheckEntrypointTypes<kQuickF2l, int64_t, float>();
Roland Levillain624279f2014-12-04 11:54:28 +00003408 break;
3409
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003410 case DataType::Type::kFloat64:
Serban Constantinescuba45db02016-07-12 22:53:02 +01003411 codegen_->InvokeRuntime(kQuickD2l, conversion, conversion->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00003412 CheckEntrypointTypes<kQuickD2l, int64_t, double>();
Roland Levillaindff1f282014-11-05 14:15:05 +00003413 break;
3414
3415 default:
3416 LOG(FATAL) << "Unexpected type conversion from " << input_type
3417 << " to " << result_type;
3418 }
3419 break;
3420
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003421 case DataType::Type::kFloat32:
Roland Levillaincff13742014-11-17 14:32:17 +00003422 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003423 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003424 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003425 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003426 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003427 case DataType::Type::kInt16:
3428 case DataType::Type::kInt32:
Roland Levillain271ab9c2014-11-27 15:23:57 +00003429 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<Register>());
Roland Levillaincff13742014-11-17 14:32:17 +00003430 break;
3431
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003432 case DataType::Type::kInt64: {
Roland Levillain232ade02015-04-20 15:14:36 +01003433 size_t adjustment = 0;
Roland Levillain6d0e4832014-11-27 18:31:21 +00003434
Roland Levillain232ade02015-04-20 15:14:36 +01003435 // Create stack space for the call to
3436 // InstructionCodeGeneratorX86::PushOntoFPStack and/or X86Assembler::fstps below.
3437 // TODO: enhance register allocator to ask for stack temporaries.
3438 if (!in.IsDoubleStackSlot() || !out.IsStackSlot()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003439 adjustment = DataType::Size(DataType::Type::kInt64);
Vladimir Markodec78172020-06-19 15:31:23 +01003440 codegen_->IncreaseFrame(adjustment);
Roland Levillain232ade02015-04-20 15:14:36 +01003441 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00003442
Roland Levillain232ade02015-04-20 15:14:36 +01003443 // Load the value to the FP stack, using temporaries if needed.
3444 PushOntoFPStack(in, 0, adjustment, false, true);
3445
3446 if (out.IsStackSlot()) {
3447 __ fstps(Address(ESP, out.GetStackIndex() + adjustment));
3448 } else {
3449 __ fstps(Address(ESP, 0));
3450 Location stack_temp = Location::StackSlot(0);
3451 codegen_->Move32(out, stack_temp);
3452 }
3453
3454 // Remove the temporary stack space we allocated.
3455 if (adjustment != 0) {
Vladimir Markodec78172020-06-19 15:31:23 +01003456 codegen_->DecreaseFrame(adjustment);
Roland Levillain232ade02015-04-20 15:14:36 +01003457 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00003458 break;
3459 }
3460
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003461 case DataType::Type::kFloat64:
Roland Levillain8964e2b2014-12-04 12:10:50 +00003462 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
Roland Levillaincff13742014-11-17 14:32:17 +00003463 break;
3464
3465 default:
3466 LOG(FATAL) << "Unexpected type conversion from " << input_type
3467 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08003468 }
Roland Levillaincff13742014-11-17 14:32:17 +00003469 break;
3470
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003471 case DataType::Type::kFloat64:
Roland Levillaincff13742014-11-17 14:32:17 +00003472 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003473 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003474 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003475 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003476 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003477 case DataType::Type::kInt16:
3478 case DataType::Type::kInt32:
Roland Levillain271ab9c2014-11-27 15:23:57 +00003479 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<Register>());
Roland Levillaincff13742014-11-17 14:32:17 +00003480 break;
3481
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003482 case DataType::Type::kInt64: {
Roland Levillain232ade02015-04-20 15:14:36 +01003483 size_t adjustment = 0;
Roland Levillain647b9ed2014-11-27 12:06:00 +00003484
Roland Levillain232ade02015-04-20 15:14:36 +01003485 // Create stack space for the call to
3486 // InstructionCodeGeneratorX86::PushOntoFPStack and/or X86Assembler::fstpl below.
3487 // TODO: enhance register allocator to ask for stack temporaries.
3488 if (!in.IsDoubleStackSlot() || !out.IsDoubleStackSlot()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003489 adjustment = DataType::Size(DataType::Type::kInt64);
Vladimir Markodec78172020-06-19 15:31:23 +01003490 codegen_->IncreaseFrame(adjustment);
Roland Levillain232ade02015-04-20 15:14:36 +01003491 }
3492
3493 // Load the value to the FP stack, using temporaries if needed.
3494 PushOntoFPStack(in, 0, adjustment, false, true);
3495
3496 if (out.IsDoubleStackSlot()) {
3497 __ fstpl(Address(ESP, out.GetStackIndex() + adjustment));
3498 } else {
3499 __ fstpl(Address(ESP, 0));
3500 Location stack_temp = Location::DoubleStackSlot(0);
3501 codegen_->Move64(out, stack_temp);
3502 }
3503
3504 // Remove the temporary stack space we allocated.
3505 if (adjustment != 0) {
Vladimir Markodec78172020-06-19 15:31:23 +01003506 codegen_->DecreaseFrame(adjustment);
Roland Levillain232ade02015-04-20 15:14:36 +01003507 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00003508 break;
3509 }
3510
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003511 case DataType::Type::kFloat32:
Roland Levillain8964e2b2014-12-04 12:10:50 +00003512 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
Roland Levillaincff13742014-11-17 14:32:17 +00003513 break;
3514
3515 default:
3516 LOG(FATAL) << "Unexpected type conversion from " << input_type
3517 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08003518 }
Roland Levillaindff1f282014-11-05 14:15:05 +00003519 break;
3520
3521 default:
3522 LOG(FATAL) << "Unexpected type conversion from " << input_type
3523 << " to " << result_type;
3524 }
3525}
3526
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003527void LocationsBuilderX86::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003528 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003529 new (GetGraph()->GetAllocator()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003530 switch (add->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003531 case DataType::Type::kInt32: {
Mark Mendell09b84632015-02-13 17:48:38 -05003532 locations->SetInAt(0, Location::RequiresRegister());
3533 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
3534 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3535 break;
3536 }
3537
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003538 case DataType::Type::kInt64: {
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003539 locations->SetInAt(0, Location::RequiresRegister());
3540 locations->SetInAt(1, Location::Any());
3541 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003542 break;
3543 }
3544
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003545 case DataType::Type::kFloat32:
3546 case DataType::Type::kFloat64: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003547 locations->SetInAt(0, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003548 if (add->InputAt(1)->IsX86LoadFromConstantTable()) {
3549 DCHECK(add->InputAt(1)->IsEmittedAtUseSite());
Nicolas Geoffray7770a3e2016-02-03 10:13:41 +00003550 } else if (add->InputAt(1)->IsConstant()) {
3551 locations->SetInAt(1, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003552 } else {
3553 locations->SetInAt(1, Location::Any());
3554 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003555 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003556 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003557 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003558
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003559 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003560 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Elliott Hughesc1896c92018-11-29 11:33:18 -08003561 UNREACHABLE();
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003562 }
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003563}
3564
// Emits x86 code for an HAdd. Int32 adds may use LEA to produce a result in a
// register distinct from both inputs; Int64 adds use an addl/adcl pair on the
// register pair; FP adds use addss/addsd, possibly folding a constant-table load.
void InstructionCodeGeneratorX86::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  switch (add->GetResultType()) {
    case DataType::Type::kInt32: {
      if (second.IsRegister()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          // out == first: plain two-operand add.
          __ addl(out.AsRegister<Register>(), second.AsRegister<Register>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          // out == second: addition commutes, add the other operand in.
          __ addl(out.AsRegister<Register>(), first.AsRegister<Register>());
        } else {
          // out is distinct from both inputs: use LEA as a non-destructive
          // three-operand add (out = first + second).
          __ leal(out.AsRegister<Register>(), Address(
              first.AsRegister<Register>(), second.AsRegister<Register>(), TIMES_1, 0));
        }
      } else if (second.IsConstant()) {
        int32_t value = second.GetConstant()->AsIntConstant()->GetValue();
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addl(out.AsRegister<Register>(), Immediate(value));
        } else {
          // Non-destructive register+immediate add via LEA.
          __ leal(out.AsRegister<Register>(), Address(first.AsRegister<Register>(), value));
        }
      } else {
        // Memory operand: must be an in-place add, so out must equal first.
        DCHECK(first.Equals(locations->Out()));
        __ addl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kInt64: {
      // 64-bit add on 32-bit x86: add the low words, then add-with-carry the
      // high words so the carry propagates.
      if (second.IsRegisterPair()) {
        __ addl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
        __ adcl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
      } else if (second.IsDoubleStackSlot()) {
        __ addl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
        __ adcl(first.AsRegisterPairHigh<Register>(),
                Address(ESP, second.GetHighStackIndex(kX86WordSize)));
      } else {
        DCHECK(second.IsConstant()) << second;
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        __ addl(first.AsRegisterPairLow<Register>(), Immediate(Low32Bits(value)));
        __ adcl(first.AsRegisterPairHigh<Register>(), Immediate(High32Bits(value)));
      }
      break;
    }

    case DataType::Type::kFloat32: {
      if (second.IsFpuRegister()) {
        __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (add->InputAt(1)->IsX86LoadFromConstantTable()) {
        // Fold the constant-table load into the add's memory operand.
        HX86LoadFromConstantTable* const_area = add->InputAt(1)->AsX86LoadFromConstantTable();
        DCHECK(const_area->IsEmittedAtUseSite());
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     const_area->GetConstant()->AsFloatConstant()->GetValue(),
                     const_area->GetBaseMethodAddress(),
                     const_area->GetLocations()->InAt(0).AsRegister<Register>()));
      } else {
        DCHECK(second.IsStackSlot());
        __ addss(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat64: {
      if (second.IsFpuRegister()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (add->InputAt(1)->IsX86LoadFromConstantTable()) {
        // Fold the constant-table load into the add's memory operand.
        HX86LoadFromConstantTable* const_area = add->InputAt(1)->AsX86LoadFromConstantTable();
        DCHECK(const_area->IsEmittedAtUseSite());
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     const_area->GetConstant()->AsDoubleConstant()->GetValue(),
                     const_area->GetBaseMethodAddress(),
                     const_area->GetLocations()->InAt(0).AsRegister<Register>()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ addsd(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
3653
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003654void LocationsBuilderX86::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003655 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003656 new (GetGraph()->GetAllocator()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003657 switch (sub->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003658 case DataType::Type::kInt32:
3659 case DataType::Type::kInt64: {
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003660 locations->SetInAt(0, Location::RequiresRegister());
3661 locations->SetInAt(1, Location::Any());
3662 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003663 break;
3664 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003665 case DataType::Type::kFloat32:
3666 case DataType::Type::kFloat64: {
Calin Juravle11351682014-10-23 15:38:15 +01003667 locations->SetInAt(0, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003668 if (sub->InputAt(1)->IsX86LoadFromConstantTable()) {
3669 DCHECK(sub->InputAt(1)->IsEmittedAtUseSite());
Nicolas Geoffray7770a3e2016-02-03 10:13:41 +00003670 } else if (sub->InputAt(1)->IsConstant()) {
3671 locations->SetInAt(1, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003672 } else {
3673 locations->SetInAt(1, Location::Any());
3674 }
Calin Juravle11351682014-10-23 15:38:15 +01003675 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003676 break;
Calin Juravle11351682014-10-23 15:38:15 +01003677 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003678
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003679 default:
Calin Juravle11351682014-10-23 15:38:15 +01003680 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003681 }
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003682}
3683
// Emits x86 code for an HSub. All forms are in-place on the first input
// (asserted below): Int32 uses subl; Int64 uses a subl/sbbl borrow chain on
// the register pair; FP uses subss/subsd, possibly folding a constant-table load.
void InstructionCodeGeneratorX86::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  // The register allocator was told SameAsFirstInput for every sub type.
  DCHECK(first.Equals(locations->Out()));
  switch (sub->GetResultType()) {
    case DataType::Type::kInt32: {
      if (second.IsRegister()) {
        __ subl(first.AsRegister<Register>(), second.AsRegister<Register>());
      } else if (second.IsConstant()) {
        __ subl(first.AsRegister<Register>(),
                Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        // Stack operand.
        __ subl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kInt64: {
      // 64-bit sub on 32-bit x86: subtract the low words, then subtract-with-
      // borrow the high words so the borrow propagates.
      if (second.IsRegisterPair()) {
        __ subl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
        __ sbbl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
      } else if (second.IsDoubleStackSlot()) {
        __ subl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
        __ sbbl(first.AsRegisterPairHigh<Register>(),
                Address(ESP, second.GetHighStackIndex(kX86WordSize)));
      } else {
        DCHECK(second.IsConstant()) << second;
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        __ subl(first.AsRegisterPairLow<Register>(), Immediate(Low32Bits(value)));
        __ sbbl(first.AsRegisterPairHigh<Register>(), Immediate(High32Bits(value)));
      }
      break;
    }

    case DataType::Type::kFloat32: {
      if (second.IsFpuRegister()) {
        __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (sub->InputAt(1)->IsX86LoadFromConstantTable()) {
        // Fold the constant-table load into the sub's memory operand.
        HX86LoadFromConstantTable* const_area = sub->InputAt(1)->AsX86LoadFromConstantTable();
        DCHECK(const_area->IsEmittedAtUseSite());
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     const_area->GetConstant()->AsFloatConstant()->GetValue(),
                     const_area->GetBaseMethodAddress(),
                     const_area->GetLocations()->InAt(0).AsRegister<Register>()));
      } else {
        DCHECK(second.IsStackSlot());
        __ subss(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat64: {
      if (second.IsFpuRegister()) {
        __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (sub->InputAt(1)->IsX86LoadFromConstantTable()) {
        // Fold the constant-table load into the sub's memory operand.
        HX86LoadFromConstantTable* const_area = sub->InputAt(1)->AsX86LoadFromConstantTable();
        DCHECK(const_area->IsEmittedAtUseSite());
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     const_area->GetConstant()->AsDoubleConstant()->GetValue(),
                     const_area->GetBaseMethodAddress(),
                     const_area->GetLocations()->InAt(0).AsRegister<Register>()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ subsd(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}
3759
Calin Juravle34bacdf2014-10-07 20:23:36 +01003760void LocationsBuilderX86::VisitMul(HMul* mul) {
3761 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003762 new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
Calin Juravle34bacdf2014-10-07 20:23:36 +01003763 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003764 case DataType::Type::kInt32:
Calin Juravle34bacdf2014-10-07 20:23:36 +01003765 locations->SetInAt(0, Location::RequiresRegister());
3766 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003767 if (mul->InputAt(1)->IsIntConstant()) {
3768 // Can use 3 operand multiply.
3769 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3770 } else {
3771 locations->SetOut(Location::SameAsFirstInput());
3772 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003773 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003774 case DataType::Type::kInt64: {
Calin Juravle34bacdf2014-10-07 20:23:36 +01003775 locations->SetInAt(0, Location::RequiresRegister());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003776 locations->SetInAt(1, Location::Any());
3777 locations->SetOut(Location::SameAsFirstInput());
3778 // Needed for imul on 32bits with 64bits output.
3779 locations->AddTemp(Location::RegisterLocation(EAX));
3780 locations->AddTemp(Location::RegisterLocation(EDX));
3781 break;
3782 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003783 case DataType::Type::kFloat32:
3784 case DataType::Type::kFloat64: {
Calin Juravleb5bfa962014-10-21 18:02:24 +01003785 locations->SetInAt(0, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003786 if (mul->InputAt(1)->IsX86LoadFromConstantTable()) {
3787 DCHECK(mul->InputAt(1)->IsEmittedAtUseSite());
Nicolas Geoffray7770a3e2016-02-03 10:13:41 +00003788 } else if (mul->InputAt(1)->IsConstant()) {
3789 locations->SetInAt(1, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003790 } else {
3791 locations->SetInAt(1, Location::Any());
3792 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003793 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003794 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003795 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003796
3797 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003798 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003799 }
3800}
3801
3802void InstructionCodeGeneratorX86::VisitMul(HMul* mul) {
3803 LocationSummary* locations = mul->GetLocations();
3804 Location first = locations->InAt(0);
3805 Location second = locations->InAt(1);
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003806 Location out = locations->Out();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003807
3808 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003809 case DataType::Type::kInt32:
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003810 // The constant may have ended up in a register, so test explicitly to avoid
3811 // problems where the output may not be the same as the first operand.
3812 if (mul->InputAt(1)->IsIntConstant()) {
Vladimir Markocde64972023-04-25 16:40:06 +00003813 Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003814 __ imull(out.AsRegister<Register>(), first.AsRegister<Register>(), imm);
3815 } else if (second.IsRegister()) {
3816 DCHECK(first.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003817 __ imull(first.AsRegister<Register>(), second.AsRegister<Register>());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003818 } else {
3819 DCHECK(second.IsStackSlot());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003820 DCHECK(first.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003821 __ imull(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003822 }
3823 break;
Calin Juravle34bacdf2014-10-07 20:23:36 +01003824
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003825 case DataType::Type::kInt64: {
Calin Juravle34bacdf2014-10-07 20:23:36 +01003826 Register in1_hi = first.AsRegisterPairHigh<Register>();
3827 Register in1_lo = first.AsRegisterPairLow<Register>();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003828 Register eax = locations->GetTemp(0).AsRegister<Register>();
3829 Register edx = locations->GetTemp(1).AsRegister<Register>();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003830
3831 DCHECK_EQ(EAX, eax);
3832 DCHECK_EQ(EDX, edx);
3833
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003834 // input: in1 - 64 bits, in2 - 64 bits.
Calin Juravle34bacdf2014-10-07 20:23:36 +01003835 // output: in1
3836 // formula: in1.hi : in1.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
3837 // parts: in1.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
3838 // parts: in1.lo = (in1.lo * in2.lo)[31:0]
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003839 if (second.IsConstant()) {
3840 DCHECK(second.GetConstant()->IsLongConstant());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003841
Vladimir Markocde64972023-04-25 16:40:06 +00003842 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003843 int32_t low_value = Low32Bits(value);
3844 int32_t high_value = High32Bits(value);
3845 Immediate low(low_value);
3846 Immediate high(high_value);
3847
3848 __ movl(eax, high);
3849 // eax <- in1.lo * in2.hi
3850 __ imull(eax, in1_lo);
3851 // in1.hi <- in1.hi * in2.lo
3852 __ imull(in1_hi, low);
3853 // in1.hi <- in1.lo * in2.hi + in1.hi * in2.lo
3854 __ addl(in1_hi, eax);
3855 // move in2_lo to eax to prepare for double precision
3856 __ movl(eax, low);
3857 // edx:eax <- in1.lo * in2.lo
3858 __ mull(in1_lo);
3859 // in1.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
3860 __ addl(in1_hi, edx);
3861 // in1.lo <- (in1.lo * in2.lo)[31:0];
3862 __ movl(in1_lo, eax);
3863 } else if (second.IsRegisterPair()) {
3864 Register in2_hi = second.AsRegisterPairHigh<Register>();
3865 Register in2_lo = second.AsRegisterPairLow<Register>();
3866
3867 __ movl(eax, in2_hi);
3868 // eax <- in1.lo * in2.hi
3869 __ imull(eax, in1_lo);
3870 // in1.hi <- in1.hi * in2.lo
3871 __ imull(in1_hi, in2_lo);
3872 // in1.hi <- in1.lo * in2.hi + in1.hi * in2.lo
3873 __ addl(in1_hi, eax);
3874 // move in1_lo to eax to prepare for double precision
3875 __ movl(eax, in1_lo);
3876 // edx:eax <- in1.lo * in2.lo
3877 __ mull(in2_lo);
3878 // in1.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
3879 __ addl(in1_hi, edx);
3880 // in1.lo <- (in1.lo * in2.lo)[31:0];
3881 __ movl(in1_lo, eax);
3882 } else {
3883 DCHECK(second.IsDoubleStackSlot()) << second;
3884 Address in2_hi(ESP, second.GetHighStackIndex(kX86WordSize));
3885 Address in2_lo(ESP, second.GetStackIndex());
3886
3887 __ movl(eax, in2_hi);
3888 // eax <- in1.lo * in2.hi
3889 __ imull(eax, in1_lo);
3890 // in1.hi <- in1.hi * in2.lo
3891 __ imull(in1_hi, in2_lo);
3892 // in1.hi <- in1.lo * in2.hi + in1.hi * in2.lo
3893 __ addl(in1_hi, eax);
3894 // move in1_lo to eax to prepare for double precision
3895 __ movl(eax, in1_lo);
3896 // edx:eax <- in1.lo * in2.lo
3897 __ mull(in2_lo);
3898 // in1.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
3899 __ addl(in1_hi, edx);
3900 // in1.lo <- (in1.lo * in2.lo)[31:0];
3901 __ movl(in1_lo, eax);
3902 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003903
3904 break;
3905 }
3906
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003907 case DataType::Type::kFloat32: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003908 DCHECK(first.Equals(locations->Out()));
3909 if (second.IsFpuRegister()) {
3910 __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3911 } else if (mul->InputAt(1)->IsX86LoadFromConstantTable()) {
Vladimir Markocde64972023-04-25 16:40:06 +00003912 HX86LoadFromConstantTable* const_area = mul->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003913 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003914 __ mulss(first.AsFpuRegister<XmmRegister>(),
3915 codegen_->LiteralFloatAddress(
Vladimir Markocde64972023-04-25 16:40:06 +00003916 const_area->GetConstant()->AsFloatConstant()->GetValue(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003917 const_area->GetBaseMethodAddress(),
Mark Mendell0616ae02015-04-17 12:49:27 -04003918 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3919 } else {
3920 DCHECK(second.IsStackSlot());
3921 __ mulss(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3922 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003923 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003924 }
3925
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003926 case DataType::Type::kFloat64: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003927 DCHECK(first.Equals(locations->Out()));
3928 if (second.IsFpuRegister()) {
3929 __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3930 } else if (mul->InputAt(1)->IsX86LoadFromConstantTable()) {
Vladimir Markocde64972023-04-25 16:40:06 +00003931 HX86LoadFromConstantTable* const_area = mul->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003932 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003933 __ mulsd(first.AsFpuRegister<XmmRegister>(),
3934 codegen_->LiteralDoubleAddress(
Vladimir Markocde64972023-04-25 16:40:06 +00003935 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003936 const_area->GetBaseMethodAddress(),
Mark Mendell0616ae02015-04-17 12:49:27 -04003937 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3938 } else {
3939 DCHECK(second.IsDoubleStackSlot());
3940 __ mulsd(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3941 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003942 break;
3943 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003944
3945 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003946 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003947 }
3948}
3949
// Loads `source` onto the x87 FP stack so it can be operated on by x87
// instructions (used e.g. by GenerateRemFP for FPREM).
//
// - If `source` already lives in a stack slot, it is loaded directly from
//   memory relative to ESP. `stack_adjustment` compensates for any extra
//   space the caller has pushed since the slot index was assigned.
// - Otherwise, the value is first spilled to a caller-provided scratch slot
//   at `temp_offset` and loaded from there (x87 loads are memory-only).
//
// `is_fp` selects an FP load (flds/fldl) vs. an integer load that converts
// to FP (filds/fildl); `is_wide` selects the 64-bit variant of each.
void InstructionCodeGeneratorX86::PushOntoFPStack(Location source,
                                                  uint32_t temp_offset,
                                                  uint32_t stack_adjustment,
                                                  bool is_fp,
                                                  bool is_wide) {
  if (source.IsStackSlot()) {
    // 32-bit value already in memory: load straight from its slot.
    DCHECK(!is_wide);
    if (is_fp) {
      __ flds(Address(ESP, source.GetStackIndex() + stack_adjustment));
    } else {
      __ filds(Address(ESP, source.GetStackIndex() + stack_adjustment));
    }
  } else if (source.IsDoubleStackSlot()) {
    // 64-bit value already in memory: load straight from its slot.
    DCHECK(is_wide);
    if (is_fp) {
      __ fldl(Address(ESP, source.GetStackIndex() + stack_adjustment));
    } else {
      __ fildl(Address(ESP, source.GetStackIndex() + stack_adjustment));
    }
  } else {
    // Write the value to the temporary location on the stack and load to FP stack.
    if (!is_wide) {
      Location stack_temp = Location::StackSlot(temp_offset);
      codegen_->Move32(stack_temp, source);
      if (is_fp) {
        __ flds(Address(ESP, temp_offset));
      } else {
        __ filds(Address(ESP, temp_offset));
      }
    } else {
      Location stack_temp = Location::DoubleStackSlot(temp_offset);
      codegen_->Move64(stack_temp, source);
      if (is_fp) {
        __ fldl(Address(ESP, temp_offset));
      } else {
        __ fildl(Address(ESP, temp_offset));
      }
    }
  }
}
3990
// Generates code for HRem on float/double using the x87 FPREM instruction.
// SSE has no FP remainder, so both operands are pushed onto the x87 stack,
// FPREM is iterated until the partial remainder is final, and the result is
// moved back into the XMM output register via a stack temporary.
// Clobbers EAX (holds the FPU status word during the FPREM loop).
void InstructionCodeGeneratorX86::GenerateRemFP(HRem *rem) {
  DataType::Type type = rem->GetResultType();
  bool is_float = type == DataType::Type::kFloat32;
  size_t elem_size = DataType::Size(type);
  LocationSummary* locations = rem->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  // Create stack space for 2 elements.
  // TODO: enhance register allocator to ask for stack temporaries.
  codegen_->IncreaseFrame(2 * elem_size);

  // Load the values to the FP stack in reverse order, using temporaries if needed.
  // FPREM computes ST(0) % ST(1), so the divisor must be pushed first.
  const bool is_wide = !is_float;
  PushOntoFPStack(second, elem_size, 2 * elem_size, /* is_fp= */ true, is_wide);
  PushOntoFPStack(first, 0, 2 * elem_size, /* is_fp= */ true, is_wide);

  // Loop doing FPREM until we stabilize.
  // FPREM may produce only a partial remainder for operands with very
  // different exponents; it must be repeated until it reports completion.
  NearLabel retry;
  __ Bind(&retry);
  __ fprem();

  // Move FP status to AX.
  __ fstsw();

  // And see if the argument reduction is complete. This is signaled by the
  // C2 FPU flag bit set to 0.
  __ andl(EAX, Immediate(kC2ConditionMask));
  __ j(kNotEqual, &retry);

  // We have settled on the final value. Retrieve it into an XMM register.
  // Store FP top of stack to real stack.
  if (is_float) {
    __ fsts(Address(ESP, 0));
  } else {
    __ fstl(Address(ESP, 0));
  }

  // Pop the 2 items from the FP stack.
  __ fucompp();

  // Load the value from the stack into an XMM register.
  DCHECK(out.IsFpuRegister()) << out;
  if (is_float) {
    __ movss(out.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
  } else {
    __ movsd(out.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
  }

  // And remove the temporary stack space we allocated.
  codegen_->DecreaseFrame(2 * elem_size);
}
4044
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01004045
4046void InstructionCodeGeneratorX86::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
4047 DCHECK(instruction->IsDiv() || instruction->IsRem());
4048
4049 LocationSummary* locations = instruction->GetLocations();
4050 DCHECK(locations->InAt(1).IsConstant());
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01004051 DCHECK(locations->InAt(1).GetConstant()->IsIntConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01004052
4053 Register out_register = locations->Out().AsRegister<Register>();
4054 Register input_register = locations->InAt(0).AsRegister<Register>();
Vladimir Markocde64972023-04-25 16:40:06 +00004055 int32_t imm = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01004056
4057 DCHECK(imm == 1 || imm == -1);
4058
4059 if (instruction->IsRem()) {
4060 __ xorl(out_register, out_register);
4061 } else {
4062 __ movl(out_register, input_register);
4063 if (imm == -1) {
4064 __ negl(out_register);
4065 }
4066 }
4067}
4068
// Emits code for HRem with a power-of-two constant divisor, without using a
// division instruction. The magnitude of the remainder is `numerator &
// (abs_imm - 1)`; for a negative numerator with a non-zero masked remainder,
// the result must be adjusted by -abs_imm to match Java's truncated-division
// remainder semantics (result has the sign of the numerator).
void InstructionCodeGeneratorX86::RemByPowerOfTwo(HRem* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  Register out = locations->Out().AsRegister<Register>();
  Register numerator = locations->InAt(0).AsRegister<Register>();

  int32_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
  uint32_t abs_imm = static_cast<uint32_t>(AbsOrMin(imm));

  Register tmp = locations->GetTemp(0).AsRegister<Register>();
  NearLabel done;
  // out = numerator & (abs_imm - 1): remainder magnitude for non-negative input.
  __ movl(out, numerator);
  __ andl(out, Immediate(abs_imm-1));
  // A zero masked remainder is correct regardless of sign.
  __ j(Condition::kZero, &done);
  // tmp = out - abs_imm (~(abs_imm - 1) == -abs_imm): the adjusted, negative remainder.
  __ leal(tmp, Address(out, static_cast<int32_t>(~(abs_imm-1))));
  // Select the adjusted value only when the numerator is negative.
  __ testl(numerator, numerator);
  __ cmovl(Condition::kLess, out, tmp);
  __ Bind(&done);
}
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01004090
// Emits code for HDiv with a power-of-two constant divisor using an
// arithmetic shift instead of a division instruction. For negative dividends,
// `abs_imm - 1` is first added (selected branchlessly via cmov) so that the
// shift rounds toward zero, as required by Java division semantics.
// A negative divisor additionally negates the result.
void InstructionCodeGeneratorX86::DivByPowerOfTwo(HDiv* instruction) {
  LocationSummary* locations = instruction->GetLocations();

  Register out_register = locations->Out().AsRegister<Register>();
  Register input_register = locations->InAt(0).AsRegister<Register>();
  int32_t imm = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
  DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
  uint32_t abs_imm = static_cast<uint32_t>(AbsOrMin(imm));

  Register num = locations->GetTemp(0).AsRegister<Register>();

  // num = input + (abs_imm - 1): bias used only for negative inputs.
  __ leal(num, Address(input_register, abs_imm - 1));
  // Keep the unbiased input when it is non-negative.
  __ testl(input_register, input_register);
  __ cmovl(kGreaterEqual, num, input_register);
  // Shift by log2(abs_imm). CTZ of a negative power-of-two two's-complement
  // value equals CTZ of its magnitude, so imm can be used directly.
  int shift = CTZ(imm);
  __ sarl(num, Immediate(shift));

  if (imm < 0) {
    __ negl(num);
  }

  __ movl(out_register, num);
}
4114
// Emits code for int32 HDiv/HRem with an arbitrary constant divisor using
// magic-number multiplication (reciprocal multiplication, see Hacker's
// Delight): quotient = hi32(magic * numerator) >> shift, with sign fix-ups.
// The register allocator pins the input to EAX and the high half of the
// widening imul to EDX; the output is EAX for div, EDX for rem.
void InstructionCodeGeneratorX86::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  int imm = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();

  Register eax = locations->InAt(0).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();
  Register num;
  Register edx;

  // For div, EDX and the numerator copy are temps; for rem, EDX is the output
  // itself and only one temp is needed for the numerator copy.
  if (instruction->IsDiv()) {
    edx = locations->GetTemp(0).AsRegister<Register>();
    num = locations->GetTemp(1).AsRegister<Register>();
  } else {
    edx = locations->Out().AsRegister<Register>();
    num = locations->GetTemp(0).AsRegister<Register>();
  }

  DCHECK_EQ(EAX, eax);
  DCHECK_EQ(EDX, edx);
  if (instruction->IsDiv()) {
    DCHECK_EQ(EAX, out);
  } else {
    DCHECK_EQ(EDX, out);
  }

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm, /* is_long= */ false, &magic, &shift);

  // Save the numerator.
  __ movl(num, eax);

  // EAX = magic
  __ movl(eax, Immediate(magic));

  // EDX:EAX = magic * numerator
  __ imull(num);

  // Correct the high half when the signs of the divisor and the magic number
  // differ (the magic constant was computed for the opposite sign).
  if (imm > 0 && magic < 0) {
    // EDX += num
    __ addl(edx, num);
  } else if (imm < 0 && magic > 0) {
    __ subl(edx, num);
  }

  // Shift if needed.
  if (shift != 0) {
    __ sarl(edx, Immediate(shift));
  }

  // EDX += 1 if EDX < 0 (round the quotient toward zero by adding the sign bit).
  __ movl(eax, edx);
  __ shrl(edx, Immediate(31));
  __ addl(edx, eax);

  if (instruction->IsRem()) {
    // rem = numerator - quotient * imm; result ends up in EDX.
    __ movl(eax, num);
    __ imull(edx, Immediate(imm));
    __ subl(eax, edx);
    __ movl(edx, eax);
  } else {
    // Quotient goes to EAX.
    __ movl(eax, edx);
  }
}
4181
// Emits code for integer HDiv/HRem.
// - int32 with a constant divisor: dispatches to the specialized strength-
//   reduced generators (one/minus-one, power of two, magic-number multiply).
// - int32 with a register divisor: uses cdq + idivl, with a slow path guarding
//   the divisor == -1 case (0x80000000 / -1 raises an arithmetic exception
//   on x86, so that case is handled via negation on the slow path).
// - int64: calls the Ldiv/Lmod quick runtime entrypoints.
// Note: division by zero is not handled here; a preceding HDivZeroCheck
// guarantees a non-zero divisor (see the imm == 0 comment below).
void InstructionCodeGeneratorX86::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  bool is_div = instruction->IsDiv();

  switch (instruction->GetResultType()) {
    case DataType::Type::kInt32: {
      DCHECK_EQ(EAX, first.AsRegister<Register>());
      DCHECK_EQ(is_div ? EAX : EDX, out.AsRegister<Register>());

      if (second.IsConstant()) {
        int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();

        if (imm == 0) {
          // Do not generate anything for 0. DivZeroCheck would forbid any generated code.
        } else if (imm == 1 || imm == -1) {
          DivRemOneOrMinusOne(instruction);
        } else if (IsPowerOfTwo(AbsOrMin(imm))) {
          if (is_div) {
            DivByPowerOfTwo(instruction->AsDiv());
          } else {
            RemByPowerOfTwo(instruction->AsRem());
          }
        } else {
          DCHECK(imm <= -2 || imm >= 2);
          GenerateDivRemWithAnyConstant(instruction);
        }
      } else {
        SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) DivRemMinusOneSlowPathX86(
            instruction, out.AsRegister<Register>(), is_div);
        codegen_->AddSlowPath(slow_path);

        Register second_reg = second.AsRegister<Register>();
        // 0x80000000/-1 triggers an arithmetic exception!
        // Dividing by -1 is actually negation and -0x800000000 = 0x80000000 so
        // it's safe to just use negl instead of more complex comparisons.

        __ cmpl(second_reg, Immediate(-1));
        __ j(kEqual, slow_path->GetEntryLabel());

        // edx:eax <- sign-extended of eax
        __ cdq();
        // eax = quotient, edx = remainder
        __ idivl(second_reg);
        __ Bind(slow_path->GetExitLabel());
      }
      break;
    }

    case DataType::Type::kInt64: {
      // Operands and result are pinned to the runtime calling convention
      // (inputs in the argument registers, result in EDX:EAX).
      InvokeRuntimeCallingConvention calling_convention;
      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
      DCHECK_EQ(EAX, out.AsRegisterPairLow<Register>());
      DCHECK_EQ(EDX, out.AsRegisterPairHigh<Register>());

      if (is_div) {
        codegen_->InvokeRuntime(kQuickLdiv, instruction, instruction->GetDexPc());
        CheckEntrypointTypes<kQuickLdiv, int64_t, int64_t, int64_t>();
      } else {
        codegen_->InvokeRuntime(kQuickLmod, instruction, instruction->GetDexPc());
        CheckEntrypointTypes<kQuickLmod, int64_t, int64_t, int64_t>();
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected type for GenerateDivRemIntegral " << instruction->GetResultType();
  }
}
4258
// Sets up register constraints for HDiv.
// int32 pins the dividend to EAX (idivl/imull requirement) with EDX as temp;
// int64 goes through a runtime call and uses the runtime calling convention;
// float/double use SSE registers, with special handling when the second input
// is a constant-table load emitted at the use site.
void LocationsBuilderX86::VisitDiv(HDiv* div) {
  LocationSummary::CallKind call_kind = (div->GetResultType() == DataType::Type::kInt64)
      ? LocationSummary::kCallOnMainOnly
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(div, call_kind);

  switch (div->GetResultType()) {
    case DataType::Type::kInt32: {
      locations->SetInAt(0, Location::RegisterLocation(EAX));
      locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
      locations->SetOut(Location::SameAsFirstInput());
      // Intel uses edx:eax as the dividend.
      locations->AddTemp(Location::RegisterLocation(EDX));
      // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
      // which enforces results to be in EAX and EDX, things are simpler if we use EAX also as
      // output and request another temp.
      if (div->InputAt(1)->IsIntConstant()) {
        locations->AddTemp(Location::RequiresRegister());
      }
      break;
    }
    case DataType::Type::kInt64: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, Location::RegisterPairLocation(
          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
      locations->SetInAt(1, Location::RegisterPairLocation(
          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
      // Runtime helper puts the result in EAX, EDX.
      locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
      break;
    }
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      if (div->InputAt(1)->IsX86LoadFromConstantTable()) {
        // The constant-table load is folded into the divss/divsd at the use
        // site, so no location is needed for the second input.
        DCHECK(div->InputAt(1)->IsEmittedAtUseSite());
      } else if (div->InputAt(1)->IsConstant()) {
        locations->SetInAt(1, Location::RequiresFpuRegister());
      } else {
        locations->SetInAt(1, Location::Any());
      }
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
4308
// Emits code for HDiv. Integer division is delegated to
// GenerateDivRemIntegral; float/double use divss/divsd with the second
// operand taken from a register, a method-relative constant-table address
// (when the input is an HX86LoadFromConstantTable emitted at the use site),
// or a stack slot.
void InstructionCodeGeneratorX86::VisitDiv(HDiv* div) {
  LocationSummary* locations = div->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  switch (div->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GenerateDivRemIntegral(div);
      break;
    }

    case DataType::Type::kFloat32: {
      if (second.IsFpuRegister()) {
        __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (div->InputAt(1)->IsX86LoadFromConstantTable()) {
        // Fold the constant load into the divide as a memory operand.
        HX86LoadFromConstantTable* const_area = div->InputAt(1)->AsX86LoadFromConstantTable();
        DCHECK(const_area->IsEmittedAtUseSite());
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     const_area->GetConstant()->AsFloatConstant()->GetValue(),
                     const_area->GetBaseMethodAddress(),
                     const_area->GetLocations()->InAt(0).AsRegister<Register>()));
      } else {
        DCHECK(second.IsStackSlot());
        __ divss(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat64: {
      if (second.IsFpuRegister()) {
        __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (div->InputAt(1)->IsX86LoadFromConstantTable()) {
        // Fold the constant load into the divide as a memory operand.
        HX86LoadFromConstantTable* const_area = div->InputAt(1)->AsX86LoadFromConstantTable();
        DCHECK(const_area->IsEmittedAtUseSite());
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     const_area->GetConstant()->AsDoubleConstant()->GetValue(),
                     const_area->GetBaseMethodAddress(),
                     const_area->GetLocations()->InAt(0).AsRegister<Register>()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ divsd(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
4361
// Sets up register constraints for HRem.
// int32 pins the dividend to EAX and the output to EDX (idivl's remainder
// register); int64 goes through a runtime call; float/double are computed on
// the x87 stack by GenerateRemFP, which needs EAX as a temp for fstsw.
void LocationsBuilderX86::VisitRem(HRem* rem) {
  DataType::Type type = rem->GetResultType();

  LocationSummary::CallKind call_kind = (rem->GetResultType() == DataType::Type::kInt64)
      ? LocationSummary::kCallOnMainOnly
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(rem, call_kind);

  switch (type) {
    case DataType::Type::kInt32: {
      locations->SetInAt(0, Location::RegisterLocation(EAX));
      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
      locations->SetOut(Location::RegisterLocation(EDX));
      // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
      // which enforces results to be in EAX and EDX, things are simpler if we use EDX also as
      // output and request another temp.
      if (rem->InputAt(1)->IsIntConstant()) {
        locations->AddTemp(Location::RequiresRegister());
      }
      break;
    }
    case DataType::Type::kInt64: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, Location::RegisterPairLocation(
          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
      locations->SetInAt(1, Location::RegisterPairLocation(
          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
      // Runtime helper puts the result in EAX, EDX.
      locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
      break;
    }
    case DataType::Type::kFloat64:
    case DataType::Type::kFloat32: {
      // Inputs may be anywhere: GenerateRemFP loads them onto the x87 stack
      // itself (via PushOntoFPStack). EAX is used for the FPU status word.
      locations->SetInAt(0, Location::Any());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::RequiresFpuRegister());
      locations->AddTemp(Location::RegisterLocation(EAX));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
4406
4407void InstructionCodeGeneratorX86::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004408 DataType::Type type = rem->GetResultType();
Calin Juravlebacfec32014-11-14 15:54:36 +00004409 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004410 case DataType::Type::kInt32:
4411 case DataType::Type::kInt64: {
Calin Juravlebacfec32014-11-14 15:54:36 +00004412 GenerateDivRemIntegral(rem);
4413 break;
4414 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004415 case DataType::Type::kFloat32:
4416 case DataType::Type::kFloat64: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05004417 GenerateRemFP(rem);
Calin Juravlebacfec32014-11-14 15:54:36 +00004418 break;
4419 }
4420 default:
4421 LOG(FATAL) << "Unexpected rem type " << type;
4422 }
4423}
4424
Aart Bik1f8d51b2018-02-15 10:42:37 -08004425static void CreateMinMaxLocations(ArenaAllocator* allocator, HBinaryOperation* minmax) {
4426 LocationSummary* locations = new (allocator) LocationSummary(minmax);
4427 switch (minmax->GetResultType()) {
4428 case DataType::Type::kInt32:
4429 locations->SetInAt(0, Location::RequiresRegister());
4430 locations->SetInAt(1, Location::RequiresRegister());
4431 locations->SetOut(Location::SameAsFirstInput());
4432 break;
4433 case DataType::Type::kInt64:
4434 locations->SetInAt(0, Location::RequiresRegister());
4435 locations->SetInAt(1, Location::RequiresRegister());
4436 locations->SetOut(Location::SameAsFirstInput());
4437 // Register to use to perform a long subtract to set cc.
4438 locations->AddTemp(Location::RequiresRegister());
4439 break;
4440 case DataType::Type::kFloat32:
4441 locations->SetInAt(0, Location::RequiresFpuRegister());
4442 locations->SetInAt(1, Location::RequiresFpuRegister());
4443 locations->SetOut(Location::SameAsFirstInput());
4444 locations->AddTemp(Location::RequiresRegister());
4445 break;
4446 case DataType::Type::kFloat64:
4447 locations->SetInAt(0, Location::RequiresFpuRegister());
4448 locations->SetInAt(1, Location::RequiresFpuRegister());
4449 locations->SetOut(Location::SameAsFirstInput());
4450 break;
4451 default:
4452 LOG(FATAL) << "Unexpected type for HMinMax " << minmax->GetResultType();
4453 }
4454}
4455
// Emits branch-free code for HMin/HMax on int32/int64. The first operand is
// already in the output location; the second operand is conditionally moved
// over it with cmov. For int64, a cmp/sbb pair performs a full 64-bit
// subtraction purely to set the condition codes.
void InstructionCodeGeneratorX86::GenerateMinMaxInt(LocationSummary* locations,
                                                    bool is_min,
                                                    DataType::Type type) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);

  // Shortcut for same input locations.
  if (op1_loc.Equals(op2_loc)) {
    // Can return immediately, as op1_loc == out_loc.
    // Note: if we ever support separate registers, e.g., output into memory, we need to check for
    // a copy here.
    DCHECK(locations->Out().Equals(op1_loc));
    return;
  }

  if (type == DataType::Type::kInt64) {
    // Need to perform a subtract to get the sign right.
    // op1 is already in the same location as the output.
    Location output = locations->Out();
    Register output_lo = output.AsRegisterPairLow<Register>();
    Register output_hi = output.AsRegisterPairHigh<Register>();

    Register op2_lo = op2_loc.AsRegisterPairLow<Register>();
    Register op2_hi = op2_loc.AsRegisterPairHigh<Register>();

    // The comparison is performed by subtracting the second operand from
    // the first operand and then setting the status flags in the same
    // manner as the SUB instruction.
    __ cmpl(output_lo, op2_lo);

    // Now use a temp and the borrow to finish the subtraction of op2_hi.
    __ movl(temp, output_hi);
    __ sbbl(temp, op2_hi);

    // Now the condition code is correct.
    Condition cond = is_min ? Condition::kGreaterEqual : Condition::kLess;
    __ cmovl(cond, output_lo, op2_lo);
    __ cmovl(cond, output_hi, op2_hi);
  } else {
    DCHECK_EQ(type, DataType::Type::kInt32);
    Register out = locations->Out().AsRegister<Register>();
    Register op2 = op2_loc.AsRegister<Register>();

    // (out := op1)
    // out <=? op2
    // if out is min jmp done
    // out := op2
    // done:

    __ cmpl(out, op2);
    Condition cond = is_min ? Condition::kGreater : Condition::kLess;
    __ cmovl(cond, out, op2);
  }
}
4511
// Emits code for HMin/HMax on float/double, with Java semantics:
// any NaN operand produces NaN, and +0.0/-0.0 are distinguished
// (min(+0,-0) == -0, max(+0,-0) == +0). The comparison is done with
// ucomiss/ucomisd; the unordered (NaN) case is detected via the parity flag.
void InstructionCodeGeneratorX86::GenerateMinMaxFP(LocationSummary* locations,
                                                   bool is_min,
                                                   DataType::Type type) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);
  Location out_loc = locations->Out();
  XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();

  // Shortcut for same input locations.
  if (op1_loc.Equals(op2_loc)) {
    DCHECK(out_loc.Equals(op1_loc));
    return;
  }

  // (out := op1)
  // out <=? op2
  // if Nan jmp Nan_label
  // if out is min jmp done
  // if op2 is min jmp op2_label
  // handle -0/+0
  // jmp done
  // Nan_label:
  // out := NaN
  // op2_label:
  // out := op2
  // done:
  //
  // This removes one jmp, but needs to copy one input (op1) to out.
  //
  // TODO: This is straight from Quick (except literal pool). Make NaN an out-of-line slowpath?

  XmmRegister op2 = op2_loc.AsFpuRegister<XmmRegister>();

  NearLabel nan, done, op2_label;
  if (type == DataType::Type::kFloat64) {
    __ ucomisd(out, op2);
  } else {
    DCHECK_EQ(type, DataType::Type::kFloat32);
    __ ucomiss(out, op2);
  }

  // ucomis* sets the parity flag on an unordered result, i.e. a NaN operand.
  __ j(Condition::kParityEven, &nan);

  __ j(is_min ? Condition::kAbove : Condition::kBelow, &op2_label);
  __ j(is_min ? Condition::kBelow : Condition::kAbove, &done);

  // Handle 0.0/-0.0.
  // The operands compared equal, so they are +0.0 and/or -0.0. OR-ing the bit
  // patterns keeps a negative sign bit for min; AND-ing clears it for max.
  if (is_min) {
    if (type == DataType::Type::kFloat64) {
      __ orpd(out, op2);
    } else {
      __ orps(out, op2);
    }
  } else {
    if (type == DataType::Type::kFloat64) {
      __ andpd(out, op2);
    } else {
      __ andps(out, op2);
    }
  }
  __ jmp(&done);

  // NaN handling.
  __ Bind(&nan);
  if (type == DataType::Type::kFloat64) {
    // TODO: Use a constant from the constant table (requires extra input).
    __ LoadLongConstant(out, kDoubleNaN);
  } else {
    // Materialize the float NaN pattern through the core-register temp.
    Register constant = locations->GetTemp(0).AsRegister<Register>();
    __ movl(constant, Immediate(kFloatNaN));
    __ movd(out, constant);
  }
  __ jmp(&done);

  // out := op2;
  __ Bind(&op2_label);
  if (type == DataType::Type::kFloat64) {
    __ movsd(out, op2);
  } else {
    __ movss(out, op2);
  }

  // Done.
  __ Bind(&done);
}
4597
Aart Bik351df3e2018-03-07 11:54:57 -08004598void InstructionCodeGeneratorX86::GenerateMinMax(HBinaryOperation* minmax, bool is_min) {
4599 DataType::Type type = minmax->GetResultType();
4600 switch (type) {
4601 case DataType::Type::kInt32:
4602 case DataType::Type::kInt64:
4603 GenerateMinMaxInt(minmax->GetLocations(), is_min, type);
4604 break;
4605 case DataType::Type::kFloat32:
4606 case DataType::Type::kFloat64:
4607 GenerateMinMaxFP(minmax->GetLocations(), is_min, type);
4608 break;
4609 default:
4610 LOG(FATAL) << "Unexpected type for HMinMax " << type;
4611 }
4612}
4613
// Register constraints for HMin are shared with HMax (see CreateMinMaxLocations).
void LocationsBuilderX86::VisitMin(HMin* min) {
  CreateMinMaxLocations(GetGraph()->GetAllocator(), min);
}
4617
// Code generation for HMin delegates to the shared min/max generator.
void InstructionCodeGeneratorX86::VisitMin(HMin* min) {
  GenerateMinMax(min, /*is_min*/ true);
}
4621
// Register constraints for HMax are shared with HMin (see CreateMinMaxLocations).
void LocationsBuilderX86::VisitMax(HMax* max) {
  CreateMinMaxLocations(GetGraph()->GetAllocator(), max);
}
4625
// Code generation for HMax delegates to the shared min/max generator.
void InstructionCodeGeneratorX86::VisitMax(HMax* max) {
  GenerateMinMax(max, /*is_min*/ false);
}
4629
// Sets up register constraints for HAbs per result type.
void LocationsBuilderX86::VisitAbs(HAbs* abs) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(abs);
  switch (abs->GetResultType()) {
    case DataType::Type::kInt32:
      // Pinned to EAX/EDX because the generated code uses cdq, which
      // sign-extends EAX into EDX.
      locations->SetInAt(0, Location::RegisterLocation(EAX));
      locations->SetOut(Location::SameAsFirstInput());
      locations->AddTemp(Location::RegisterLocation(EDX));
      break;
    case DataType::Type::kInt64:
      // Output overlaps: the sign mask is built in the output pair while the
      // input pair is still live. A temp holds the sign word.
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
      locations->AddTemp(Location::RequiresRegister());
      break;
    case DataType::Type::kFloat32:
      // FP temp for the sign mask; GP temp to materialize the 32-bit constant.
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::SameAsFirstInput());
      locations->AddTemp(Location::RequiresFpuRegister());
      locations->AddTemp(Location::RequiresRegister());
      break;
    case DataType::Type::kFloat64:
      // Only an FP temp: the 64-bit mask is loaded via LoadLongConstant.
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::SameAsFirstInput());
      locations->AddTemp(Location::RequiresFpuRegister());
      break;
    default:
      LOG(FATAL) << "Unexpected type for HAbs " << abs->GetResultType();
  }
}
4658
// Emits absolute value. Integers use the branch-free sign trick
// (x XOR sign) - sign; floats/doubles clear the sign bit with a bitmask.
void InstructionCodeGeneratorX86::VisitAbs(HAbs* abs) {
  LocationSummary* locations = abs->GetLocations();
  switch (abs->GetResultType()) {
    case DataType::Type::kInt32: {
      Register out = locations->Out().AsRegister<Register>();
      DCHECK_EQ(out, EAX);
      Register temp = locations->GetTemp(0).AsRegister<Register>();
      DCHECK_EQ(temp, EDX);
      // Sign extend EAX into EDX.
      __ cdq();
      // XOR EAX with sign.
      __ xorl(EAX, EDX);
      // Subtract out sign to correct.
      __ subl(EAX, EDX);
      // The result is in EAX.
      break;
    }
    case DataType::Type::kInt64: {
      Location input = locations->InAt(0);
      Register input_lo = input.AsRegisterPairLow<Register>();
      Register input_hi = input.AsRegisterPairHigh<Register>();
      Location output = locations->Out();
      Register output_lo = output.AsRegisterPairLow<Register>();
      Register output_hi = output.AsRegisterPairHigh<Register>();
      Register temp = locations->GetTemp(0).AsRegister<Register>();
      // Compute the sign into the temporary.
      __ movl(temp, input_hi);
      __ sarl(temp, Immediate(31));
      // Store the sign into the output.
      __ movl(output_lo, temp);
      __ movl(output_hi, temp);
      // XOR the input to the output.
      __ xorl(output_lo, input_lo);
      __ xorl(output_hi, input_hi);
      // Subtract the sign (64-bit subtract: subl then sbbl carries the borrow).
      __ subl(output_lo, temp);
      __ sbbl(output_hi, temp);
      break;
    }
    case DataType::Type::kFloat32: {
      XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
      XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      Register constant = locations->GetTemp(1).AsRegister<Register>();
      // Mask off the sign bit (bit 31) of the float.
      __ movl(constant, Immediate(INT32_C(0x7FFFFFFF)));
      __ movd(temp, constant);
      __ andps(out, temp);
      break;
    }
    case DataType::Type::kFloat64: {
      XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
      XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Mask off the sign bit (bit 63) of the double.
      // TODO: Use a constant from the constant table (requires extra input).
      __ LoadLongConstant(temp, INT64_C(0x7FFFFFFFFFFFFFFF));
      __ andpd(out, temp);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HAbs " << abs->GetResultType();
  }
}
4719
// Sets up locations for an explicit divide-by-zero check. 32-bit (and
// narrower) values may live anywhere; 64-bit non-constant values need a temp
// register to OR the two halves together for the zero test.
void LocationsBuilderX86::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  switch (instruction->GetType()) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      locations->SetInAt(0, Location::Any());
      break;
    }
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
      if (!instruction->IsConstant()) {
        locations->AddTemp(Location::RequiresRegister());
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
  }
}
4743
// Emits the divide-by-zero check: branch to the throwing slow path when the
// divisor is zero. For a constant divisor, code is emitted only when it is
// actually zero (an unconditional jump); a non-zero constant emits nothing.
void InstructionCodeGeneratorX86::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCode* slow_path =
      new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathX86(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location value = locations->InAt(0);

  switch (instruction->GetType()) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      if (value.IsRegister()) {
        // test reg,reg sets ZF iff the value is zero.
        __ testl(value.AsRegister<Register>(), value.AsRegister<Register>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsStackSlot()) {
        __ cmpl(Address(ESP, value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    case DataType::Type::kInt64: {
      if (value.IsRegisterPair()) {
        // A 64-bit value is zero iff the OR of its halves is zero.
        Register temp = locations->GetTemp(0).AsRegister<Register>();
        __ movl(temp, value.AsRegisterPairLow<Register>());
        __ orl(temp, value.AsRegisterPairHigh<Register>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HDivZeroCheck" << instruction->GetType();
  }
}
4791
// Sets up locations for HShl/HShr/HUShr. x86 variable shifts require the
// count in CL, hence the ByteRegisterOrConstant(ECX, ...) constraint.
void LocationsBuilderX86::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(op, LocationSummary::kNoCall);

  switch (op->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      // Can't have Location::Any() and output SameAsFirstInput()
      locations->SetInAt(0, Location::RequiresRegister());
      // The shift count needs to be in CL or a constant.
      locations->SetInAt(1, Location::ByteRegisterOrConstant(ECX, op->InputAt(1)));
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected op type " << op->GetResultType();
  }
}
4812
// Emits code for HShl/HShr/HUShr, shifting in place (output == first input).
// Shift counts are masked to the Java-defined distance (& 31 for int,
// & 63 for long); a resulting count of 0 emits nothing.
void InstructionCodeGeneratorX86::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));

  switch (op->GetResultType()) {
    case DataType::Type::kInt32: {
      DCHECK(first.IsRegister());
      Register first_reg = first.AsRegister<Register>();
      if (second.IsRegister()) {
        // Variable shift: count must already be in CL.
        Register second_reg = second.AsRegister<Register>();
        DCHECK_EQ(ECX, second_reg);
        if (op->IsShl()) {
          __ shll(first_reg, second_reg);
        } else if (op->IsShr()) {
          __ sarl(first_reg, second_reg);
        } else {
          __ shrl(first_reg, second_reg);
        }
      } else {
        int32_t shift = second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance;
        if (shift == 0) {
          return;
        }
        Immediate imm(shift);
        if (op->IsShl()) {
          __ shll(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarl(first_reg, imm);
        } else {
          __ shrl(first_reg, imm);
        }
      }
      break;
    }
    case DataType::Type::kInt64: {
      if (second.IsRegister()) {
        // Variable 64-bit shifts are delegated to helpers that handle the
        // register pair and counts >= 32.
        Register second_reg = second.AsRegister<Register>();
        DCHECK_EQ(ECX, second_reg);
        if (op->IsShl()) {
          GenerateShlLong(first, second_reg);
        } else if (op->IsShr()) {
          GenerateShrLong(first, second_reg);
        } else {
          GenerateUShrLong(first, second_reg);
        }
      } else {
        // Shift by a constant.
        int32_t shift = second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance;
        // Nothing to do if the shift is 0, as the input is already the output.
        if (shift != 0) {
          if (op->IsShl()) {
            GenerateShlLong(first, shift);
          } else if (op->IsShr()) {
            GenerateShrLong(first, shift);
          } else {
            GenerateUShrLong(first, shift);
          }
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected op type " << op->GetResultType();
  }
}
4882
// 64-bit left shift of a register pair by a constant (1..63), in place.
void InstructionCodeGeneratorX86::GenerateShlLong(const Location& loc, int shift) {
  Register low = loc.AsRegisterPairLow<Register>();
  Register high = loc.AsRegisterPairHigh<Register>();
  if (shift == 1) {
    // This is just an addition: x << 1 == x + x, with adcl carrying into high.
    __ addl(low, low);
    __ adcl(high, high);
  } else if (shift == 32) {
    // Shift by 32 is easy. High gets low, and low gets 0.
    codegen_->EmitParallelMoves(
        loc.ToLow(),
        loc.ToHigh(),
        DataType::Type::kInt32,
        Location::ConstantLocation(GetGraph()->GetIntConstant(0)),
        loc.ToLow(),
        DataType::Type::kInt32);
  } else if (shift > 32) {
    // Low part becomes 0. High part is low part << (shift-32).
    __ movl(high, low);
    __ shll(high, Immediate(shift - 32));
    __ xorl(low, low);
  } else {
    // Between 1 and 31: shld shifts `high` left, filling from `low`'s top bits.
    __ shld(high, low, Immediate(shift));
    __ shll(low, Immediate(shift));
  }
}
4910
// 64-bit left shift of a register pair by a variable count in `shifter`.
// shld/shll use the count modulo 32, so for counts with bit 5 set (32..63)
// a fix-up moves low into high and zeroes low.
void InstructionCodeGeneratorX86::GenerateShlLong(const Location& loc, Register shifter) {
  NearLabel done;
  __ shld(loc.AsRegisterPairHigh<Register>(), loc.AsRegisterPairLow<Register>(), shifter);
  __ shll(loc.AsRegisterPairLow<Register>(), shifter);
  __ testl(shifter, Immediate(32));
  __ j(kEqual, &done);
  __ movl(loc.AsRegisterPairHigh<Register>(), loc.AsRegisterPairLow<Register>());
  __ movl(loc.AsRegisterPairLow<Register>(), Immediate(0));
  __ Bind(&done);
}
4921
// 64-bit arithmetic right shift of a register pair by a constant, in place.
void InstructionCodeGeneratorX86::GenerateShrLong(const Location& loc, int shift) {
  Register low = loc.AsRegisterPairLow<Register>();
  Register high = loc.AsRegisterPairHigh<Register>();
  if (shift == 32) {
    // Need to copy the sign: low gets high, high becomes all sign bits.
    DCHECK_NE(low, high);
    __ movl(low, high);
    __ sarl(high, Immediate(31));
  } else if (shift > 32) {
    DCHECK_NE(low, high);
    // High part becomes sign. Low part is shifted by shift - 32.
    __ movl(low, high);
    __ sarl(high, Immediate(31));
    __ sarl(low, Immediate(shift - 32));
  } else {
    // Between 1 and 31: shrd shifts `low` right, filling from `high`'s low bits.
    __ shrd(low, high, Immediate(shift));
    __ sarl(high, Immediate(shift));
  }
}
4942
// 64-bit arithmetic right shift by a variable count. shrd/sarl use the count
// modulo 32; counts with bit 5 set (32..63) are fixed up by moving high into
// low and filling high with the sign.
void InstructionCodeGeneratorX86::GenerateShrLong(const Location& loc, Register shifter) {
  NearLabel done;
  __ shrd(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>(), shifter);
  __ sarl(loc.AsRegisterPairHigh<Register>(), shifter);
  __ testl(shifter, Immediate(32));
  __ j(kEqual, &done);
  __ movl(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>());
  __ sarl(loc.AsRegisterPairHigh<Register>(), Immediate(31));
  __ Bind(&done);
}
4953
// 64-bit logical (unsigned) right shift of a register pair by a constant.
void InstructionCodeGeneratorX86::GenerateUShrLong(const Location& loc, int shift) {
  Register low = loc.AsRegisterPairLow<Register>();
  Register high = loc.AsRegisterPairHigh<Register>();
  if (shift == 32) {
    // Shift by 32 is easy. Low gets high, and high gets 0.
    codegen_->EmitParallelMoves(
        loc.ToHigh(),
        loc.ToLow(),
        DataType::Type::kInt32,
        Location::ConstantLocation(GetGraph()->GetIntConstant(0)),
        loc.ToHigh(),
        DataType::Type::kInt32);
  } else if (shift > 32) {
    // Low part is high >> (shift - 32). High part becomes 0.
    __ movl(low, high);
    __ shrl(low, Immediate(shift - 32));
    __ xorl(high, high);
  } else {
    // Between 1 and 31: shrd shifts `low` right, filling from `high`; high
    // shifts in zeros.
    __ shrd(low, high, Immediate(shift));
    __ shrl(high, Immediate(shift));
  }
}
4977
// 64-bit logical right shift by a variable count. shrd/shrl use the count
// modulo 32; counts with bit 5 set (32..63) are fixed up by moving high into
// low and zeroing high.
void InstructionCodeGeneratorX86::GenerateUShrLong(const Location& loc, Register shifter) {
  NearLabel done;
  __ shrd(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>(), shifter);
  __ shrl(loc.AsRegisterPairHigh<Register>(), shifter);
  __ testl(shifter, Immediate(32));
  __ j(kEqual, &done);
  __ movl(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>());
  __ movl(loc.AsRegisterPairHigh<Register>(), Immediate(0));
  __ Bind(&done);
}
4988
// Sets up locations for rotate-right. The variable count must be in CL;
// 64-bit rotates additionally need a temp register for the swap/shrd sequence.
void LocationsBuilderX86::VisitRor(HRor* ror) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(ror, LocationSummary::kNoCall);

  switch (ror->GetResultType()) {
    case DataType::Type::kInt64:
      // Add the temporary needed.
      locations->AddTemp(Location::RequiresRegister());
      FALLTHROUGH_INTENDED;
    case DataType::Type::kInt32:
      locations->SetInAt(0, Location::RequiresRegister());
      // The shift count needs to be in CL (unless it is a constant).
      locations->SetInAt(1, Location::ByteRegisterOrConstant(ECX, ror->InputAt(1)));
      locations->SetOut(Location::SameAsFirstInput());
      break;
    default:
      LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
      UNREACHABLE();
  }
}
5009
// Emits rotate-right, in place. 32-bit uses the rorl instruction directly.
// 64-bit uses a pair of shrd instructions that rotate by (count mod 32),
// then swaps the halves when the count's bit 5 is set (32..63).
void InstructionCodeGeneratorX86::VisitRor(HRor* ror) {
  LocationSummary* locations = ror->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  if (ror->GetResultType() == DataType::Type::kInt32) {
    Register first_reg = first.AsRegister<Register>();
    if (second.IsRegister()) {
      Register second_reg = second.AsRegister<Register>();
      __ rorl(first_reg, second_reg);
    } else {
      Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
      __ rorl(first_reg, imm);
    }
    return;
  }

  DCHECK_EQ(ror->GetResultType(), DataType::Type::kInt64);
  Register first_reg_lo = first.AsRegisterPairLow<Register>();
  Register first_reg_hi = first.AsRegisterPairHigh<Register>();
  Register temp_reg = locations->GetTemp(0).AsRegister<Register>();
  if (second.IsRegister()) {
    Register second_reg = second.AsRegister<Register>();
    DCHECK_EQ(second_reg, ECX);
    // Rotate both halves by (count mod 32), then conditionally swap the
    // halves (via cmov) when bit 5 of the count is set.
    __ movl(temp_reg, first_reg_hi);
    __ shrd(first_reg_hi, first_reg_lo, second_reg);
    __ shrd(first_reg_lo, temp_reg, second_reg);
    __ movl(temp_reg, first_reg_hi);
    __ testl(second_reg, Immediate(32));
    __ cmovl(kNotEqual, first_reg_hi, first_reg_lo);
    __ cmovl(kNotEqual, first_reg_lo, temp_reg);
  } else {
    int32_t shift_amt = second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance;
    if (shift_amt == 0) {
      // Already fine.
      return;
    }
    if (shift_amt == 32) {
      // Just swap.
      __ movl(temp_reg, first_reg_lo);
      __ movl(first_reg_lo, first_reg_hi);
      __ movl(first_reg_hi, temp_reg);
      return;
    }

    Immediate imm(shift_amt);
    // Save the contents of the low value.
    __ movl(temp_reg, first_reg_lo);

    // Shift right into low, feeding bits from high.
    __ shrd(first_reg_lo, first_reg_hi, imm);

    // Shift right into high, feeding bits from the original low.
    __ shrd(first_reg_hi, temp_reg, imm);

    // Swap if needed (shrd used shift_amt mod 32, so counts > 32 end up with
    // the halves exchanged).
    if (shift_amt > 32) {
      __ movl(temp_reg, first_reg_lo);
      __ movl(first_reg_lo, first_reg_hi);
      __ movl(first_reg_hi, temp_reg);
    }
  }
}
5073
// Shift left: locations are shared across shl/shr/ushr (see HandleShift).
void LocationsBuilderX86::VisitShl(HShl* shl) {
  HandleShift(shl);
}
5077
// Shift left: code generation is shared across shl/shr/ushr (see HandleShift).
void InstructionCodeGeneratorX86::VisitShl(HShl* shl) {
  HandleShift(shl);
}
5081
// Arithmetic shift right: locations shared via HandleShift.
void LocationsBuilderX86::VisitShr(HShr* shr) {
  HandleShift(shr);
}
5085
// Arithmetic shift right: code generation shared via HandleShift.
void InstructionCodeGeneratorX86::VisitShr(HShr* shr) {
  HandleShift(shr);
}
5089
// Logical shift right: locations shared via HandleShift.
void LocationsBuilderX86::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
5093
// Logical shift right: code generation shared via HandleShift.
void InstructionCodeGeneratorX86::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
5097
// Object allocation is a runtime call: the class argument goes in the first
// runtime-call register, and the result (the new object) comes back in EAX.
void LocationsBuilderX86::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  locations->SetOut(Location::RegisterLocation(EAX));
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
5105
// Emits the runtime call that allocates the object; the entrypoint is chosen
// by the instruction itself.
void InstructionCodeGeneratorX86::VisitNewInstance(HNewInstance* instruction) {
  codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
  DCHECK(!codegen_->IsLeafMethod());
}
5111
// Array allocation is a runtime call: class and length go in the first two
// runtime-call registers; the new array comes back in EAX.
void LocationsBuilderX86::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  locations->SetOut(Location::RegisterLocation(EAX));
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
}
5120
// Emits the runtime call that allocates the array.
void InstructionCodeGeneratorX86::VisitNewArray(HNewArray* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care of poisoning the reference.
  QuickEntrypointEnum entrypoint = CodeGenerator::GetArrayAllocationEntrypoint(instruction);
  codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
  DCHECK(!codegen_->IsLeafMethod());
}
5128
// Binds an incoming method parameter to its location. Stack-passed parameters
// live in the caller's frame, so their offsets are rebased by this method's
// frame size to be addressable relative to the current SP.
void LocationsBuilderX86::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}
5140
// Nothing to emit: the parameter is already in the location chosen above.
void InstructionCodeGeneratorX86::VisitParameterValue(
    [[maybe_unused]] HParameterValue* instruction) {}
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005143
// The current ArtMethod* is pinned to the dedicated method register.
void LocationsBuilderX86::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}
5149
// Nothing to emit: the method pointer is already in the fixed register.
void InstructionCodeGeneratorX86::VisitCurrentMethod([[maybe_unused]] HCurrentMethod* instruction) {
}
5152
// Loads a method entry out of a class's vtable or IMT: class in, method out.
void LocationsBuilderX86::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}
5159
// Emits the table lookup. vtable entries are embedded in the class, so a
// single load suffices; IMT entries need two loads (class -> IMT pointer ->
// entry).
void InstructionCodeGeneratorX86::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kX86PointerSize).SizeValue();
    __ movl(locations->Out().AsRegister<Register>(),
            Address(locations->InAt(0).AsRegister<Register>(), method_offset));
  } else {
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kX86PointerSize));
    // First load the IMT pointer out of the class.
    __ movl(locations->Out().AsRegister<Register>(),
            Address(locations->InAt(0).AsRegister<Register>(),
                    mirror::Class::ImtPtrOffset(kX86PointerSize).Uint32Value()));
    // temp = temp->GetImtEntryAt(method_offset);
    __ movl(locations->Out().AsRegister<Register>(),
            Address(locations->Out().AsRegister<Register>(), method_offset));
  }
}
5178
// Bitwise NOT operates in place: output must be the same register as input.
void LocationsBuilderX86::VisitNot(HNot* not_) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(not_, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}
5185
// Emits bitwise NOT in place; a 64-bit value negates both halves of the
// register pair independently.
void InstructionCodeGeneratorX86::VisitNot(HNot* not_) {
  LocationSummary* locations = not_->GetLocations();
  Location in = locations->InAt(0);
  Location out = locations->Out();
  DCHECK(in.Equals(out));
  switch (not_->GetResultType()) {
    case DataType::Type::kInt32:
      __ notl(out.AsRegister<Register>());
      break;

    case DataType::Type::kInt64:
      __ notl(out.AsRegisterPairLow<Register>());
      __ notl(out.AsRegisterPairHigh<Register>());
      break;

    default:
      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
  }
}
5205
// Boolean negation operates in place: output must equal input.
void LocationsBuilderX86::VisitBooleanNot(HBooleanNot* bool_not) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(bool_not, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}
5212
5213void InstructionCodeGeneratorX86::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01005214 LocationSummary* locations = bool_not->GetLocations();
5215 Location in = locations->InAt(0);
5216 Location out = locations->Out();
5217 DCHECK(in.Equals(out));
5218 __ xorl(out.AsRegister<Register>(), Immediate(1));
5219}
5220
// Sets up locations for HCompare (the three-way compare producing -1/0/1).
// Integer compares allow the second operand anywhere; FP compares keep a
// constant-table operand folded at the use site when possible.
void LocationsBuilderX86::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(compare, LocationSummary::kNoCall);
  switch (compare->InputAt(0)->GetType()) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      if (compare->InputAt(1)->IsX86LoadFromConstantTable()) {
        // The constant-table load is emitted at the use site; no location needed.
        DCHECK(compare->InputAt(1)->IsEmittedAtUseSite());
      } else if (compare->InputAt(1)->IsConstant()) {
        locations->SetInAt(1, Location::RequiresFpuRegister());
      } else {
        locations->SetInAt(1, Location::Any());
      }
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
  }
}
5254
// Generates code for HCompare: materializes -1/0/1 in `out` depending on
// whether left < / == / > right. Long compares are done pairwise (high word
// signed, low word unsigned); float compares honor the gt/lt bias for NaN.
void InstructionCodeGeneratorX86::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  Register out = locations->Out().AsRegister<Register>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  NearLabel less, greater, done;
  // Condition used below to branch to `less`; integral compares use the
  // signed kLess, long/float compares switch to kBelow (carry flag).
  Condition less_cond = kLess;

  switch (compare->InputAt(0)->GetType()) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case DataType::Type::kInt64: {
      Register left_low = left.AsRegisterPairLow<Register>();
      Register left_high = left.AsRegisterPairHigh<Register>();
      int32_t val_low = 0;
      int32_t val_high = 0;
      bool right_is_const = false;

      // Split a constant right-hand side into its 32-bit halves.
      if (right.IsConstant()) {
        DCHECK(right.GetConstant()->IsLongConstant());
        right_is_const = true;
        int64_t val = right.GetConstant()->AsLongConstant()->GetValue();
        val_low = Low32Bits(val);
        val_high = High32Bits(val);
      }

      // Compare the high words first; they decide the result unless equal.
      if (right.IsRegisterPair()) {
        __ cmpl(left_high, right.AsRegisterPairHigh<Register>());
      } else if (right.IsDoubleStackSlot()) {
        __ cmpl(left_high, Address(ESP, right.GetHighStackIndex(kX86WordSize)));
      } else {
        DCHECK(right_is_const) << right;
        codegen_->Compare32BitValue(left_high, val_high);
      }
      __ j(kLess, &less);  // Signed compare.
      __ j(kGreater, &greater);  // Signed compare.
      // High words are equal: the unsigned compare of the low words decides.
      if (right.IsRegisterPair()) {
        __ cmpl(left_low, right.AsRegisterPairLow<Register>());
      } else if (right.IsDoubleStackSlot()) {
        __ cmpl(left_low, Address(ESP, right.GetStackIndex()));
      } else {
        DCHECK(right_is_const) << right;
        codegen_->Compare32BitValue(left_low, val_low);
      }
      less_cond = kBelow;  // for CF (unsigned).
      break;
    }
    case DataType::Type::kFloat32: {
      GenerateFPCompare(left, right, compare, false);
      // NaN: the bias determines whether an unordered result counts as
      // greater or less.
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  // for CF (floats).
      break;
    }
    case DataType::Type::kFloat64: {
      GenerateFPCompare(left, right, compare, true);
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  // for CF (floats).
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
  }

  // Fold the flags into -1/0/1.
  __ movl(out, Immediate(0));
  __ j(kEqual, &done);
  __ j(less_cond, &less);

  __ Bind(&greater);
  __ movl(out, Immediate(1));
  __ jmp(&done);

  __ Bind(&less);
  __ movl(out, Immediate(-1));

  __ Bind(&done);
}
5339
Nicolas Geoffrayc32e7702014-04-24 12:43:16 +01005340void LocationsBuilderX86::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005341 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005342 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01005343 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray31d76b42014-06-09 15:02:22 +01005344 locations->SetInAt(i, Location::Any());
5345 }
5346 locations->SetOut(Location::Any());
Nicolas Geoffrayc32e7702014-04-24 12:43:16 +01005347}
5348
Stefano Cianciulli78f3c722023-05-16 10:32:54 +00005349void InstructionCodeGeneratorX86::VisitPhi([[maybe_unused]] HPhi* instruction) {
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005350 LOG(FATAL) << "Unreachable";
Nicolas Geoffrayc32e7702014-04-24 12:43:16 +01005351}
5352
Roland Levillain7c1559a2015-12-15 10:55:36 +00005353void CodeGeneratorX86::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00005354 /*
5355 * According to the JSR-133 Cookbook, for x86 only StoreLoad/AnyAny barriers need memory fence.
5356 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86 memory model.
5357 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
5358 */
5359 switch (kind) {
5360 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00005361 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00005362 break;
5363 }
5364 case MemBarrierKind::kAnyStore:
5365 case MemBarrierKind::kLoadAny:
5366 case MemBarrierKind::kStoreStore: {
5367 // nop
5368 break;
5369 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05005370 case MemBarrierKind::kNTStoreStore:
5371 // Non-Temporal Store/Store needs an explicit fence.
Andreas Gampe3db70682018-12-26 15:12:03 -08005372 MemoryFence(/* non-temporal= */ true);
Mark Mendell7aa04a12016-01-27 22:39:07 -05005373 break;
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01005374 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005375}
5376
Vladimir Markodc151b22015-10-15 18:02:30 +01005377HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86::GetSupportedInvokeStaticOrDirectDispatch(
Stefano Cianciulli78f3c722023-05-16 10:32:54 +00005378 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
5379 [[maybe_unused]] ArtMethod* method) {
Nicolas Geoffray133719e2017-01-22 15:44:39 +00005380 return desired_dispatch_info;
Vladimir Markodc151b22015-10-15 18:02:30 +01005381}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005382
Nicolas Geoffray8d34a182020-09-16 09:46:58 +01005383Register CodeGeneratorX86::GetInvokeExtraParameter(HInvoke* invoke, Register temp) {
5384 if (invoke->IsInvokeStaticOrDirect()) {
Vladimir Markocde64972023-04-25 16:40:06 +00005385 return GetInvokeStaticOrDirectExtraParameter(invoke->AsInvokeStaticOrDirect(), temp);
Nicolas Geoffray8d34a182020-09-16 09:46:58 +01005386 }
5387 DCHECK(invoke->IsInvokeInterface());
5388 Location location =
Vladimir Markocde64972023-04-25 16:40:06 +00005389 invoke->GetLocations()->InAt(invoke->AsInvokeInterface()->GetSpecialInputIndex());
Nicolas Geoffray8d34a182020-09-16 09:46:58 +01005390 return location.AsRegister<Register>();
5391}
5392
Vladimir Marko0f7dca42015-11-02 14:36:43 +00005393Register CodeGeneratorX86::GetInvokeStaticOrDirectExtraParameter(HInvokeStaticOrDirect* invoke,
5394 Register temp) {
Vladimir Markoc53c0792015-11-19 15:48:33 +00005395 Location location = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko0f7dca42015-11-02 14:36:43 +00005396 if (!invoke->GetLocations()->Intrinsified()) {
5397 return location.AsRegister<Register>();
5398 }
5399 // For intrinsics we allow any location, so it may be on the stack.
5400 if (!location.IsRegister()) {
5401 __ movl(temp, Address(ESP, location.GetStackIndex()));
5402 return temp;
5403 }
5404 // For register locations, check if the register was saved. If so, get it from the stack.
5405 // Note: There is a chance that the register was saved but not overwritten, so we could
5406 // save one load. However, since this is just an intrinsic slow path we prefer this
5407 // simple and more robust approach rather that trying to determine if that's the case.
5408 SlowPathCode* slow_path = GetCurrentSlowPath();
Vladimir Marko4ee8e292017-06-02 15:39:30 +00005409 DCHECK(slow_path != nullptr); // For intrinsified invokes the call is emitted on the slow path.
5410 if (slow_path->IsCoreRegisterSaved(location.AsRegister<Register>())) {
5411 int stack_offset = slow_path->GetStackOffsetOfCoreRegister(location.AsRegister<Register>());
5412 __ movl(temp, Address(ESP, stack_offset));
5413 return temp;
Vladimir Marko0f7dca42015-11-02 14:36:43 +00005414 }
5415 return location.AsRegister<Register>();
5416}
5417
Nicolas Geoffray8d34a182020-09-16 09:46:58 +01005418void CodeGeneratorX86::LoadMethod(MethodLoadKind load_kind, Location temp, HInvoke* invoke) {
5419 switch (load_kind) {
5420 case MethodLoadKind::kBootImageLinkTimePcRelative: {
5421 DCHECK(GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension());
5422 Register base_reg = GetInvokeExtraParameter(invoke, temp.AsRegister<Register>());
5423 __ leal(temp.AsRegister<Register>(),
5424 Address(base_reg, CodeGeneratorX86::kPlaceholder32BitOffset));
5425 RecordBootImageMethodPatch(invoke);
5426 break;
5427 }
5428 case MethodLoadKind::kBootImageRelRo: {
5429 size_t index = invoke->IsInvokeInterface()
Vladimir Markocde64972023-04-25 16:40:06 +00005430 ? invoke->AsInvokeInterface()->GetSpecialInputIndex()
5431 : invoke->AsInvokeStaticOrDirect()->GetSpecialInputIndex();
Nicolas Geoffray8d34a182020-09-16 09:46:58 +01005432 Register base_reg = GetInvokeExtraParameter(invoke, temp.AsRegister<Register>());
5433 __ movl(temp.AsRegister<Register>(), Address(base_reg, kPlaceholder32BitOffset));
5434 RecordBootImageRelRoPatch(
Vladimir Markocde64972023-04-25 16:40:06 +00005435 invoke->InputAt(index)->AsX86ComputeBaseMethodAddress(),
Nicolas Geoffray8d34a182020-09-16 09:46:58 +01005436 GetBootImageOffset(invoke));
5437 break;
5438 }
5439 case MethodLoadKind::kBssEntry: {
5440 Register base_reg = GetInvokeExtraParameter(invoke, temp.AsRegister<Register>());
5441 __ movl(temp.AsRegister<Register>(), Address(base_reg, kPlaceholder32BitOffset));
5442 RecordMethodBssEntryPatch(invoke);
5443 // No need for memory fence, thanks to the x86 memory model.
5444 break;
5445 }
5446 case MethodLoadKind::kJitDirectAddress: {
5447 __ movl(temp.AsRegister<Register>(),
5448 Immediate(reinterpret_cast32<uint32_t>(invoke->GetResolvedMethod())));
5449 break;
5450 }
5451 case MethodLoadKind::kRuntimeCall: {
5452 // Test situation, don't do anything.
5453 break;
5454 }
5455 default: {
5456 LOG(FATAL) << "Load kind should have already been handled " << load_kind;
5457 UNREACHABLE();
5458 }
5459 }
5460}
5461
Vladimir Markoe7197bf2017-06-02 17:00:23 +01005462void CodeGeneratorX86::GenerateStaticOrDirectCall(
5463 HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
Vladimir Marko58155012015-08-19 12:49:41 +00005464 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
5465 switch (invoke->GetMethodLoadKind()) {
Nicolas Geoffray6d69b522020-09-23 14:47:28 +01005466 case MethodLoadKind::kStringInit: {
Vladimir Marko58155012015-08-19 12:49:41 +00005467 // temp = thread->string_init_entrypoint
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01005468 uint32_t offset =
5469 GetThreadOffset<kX86PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
5470 __ fs()->movl(temp.AsRegister<Register>(), Address::Absolute(offset));
Vladimir Marko58155012015-08-19 12:49:41 +00005471 break;
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01005472 }
Nicolas Geoffray8d34a182020-09-16 09:46:58 +01005473 case MethodLoadKind::kRecursive: {
Vladimir Marko86c87522020-05-11 16:55:55 +01005474 callee_method = invoke->GetLocations()->InAt(invoke->GetCurrentMethodIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00005475 break;
Vladimir Marko65979462017-05-19 17:25:12 +01005476 }
Nicolas Geoffray6d69b522020-09-23 14:47:28 +01005477 case MethodLoadKind::kRuntimeCall: {
Vladimir Markoe7197bf2017-06-02 17:00:23 +01005478 GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
5479 return; // No code pointer retrieval; the runtime performs the call directly.
Vladimir Marko9b688a02015-05-06 14:12:42 +01005480 }
Vladimir Markoeb9eb002020-10-02 13:54:19 +01005481 case MethodLoadKind::kBootImageLinkTimePcRelative:
5482 // For kCallCriticalNative we skip loading the method and do the call directly.
5483 if (invoke->GetCodePtrLocation() == CodePtrLocation::kCallCriticalNative) {
5484 break;
5485 }
5486 FALLTHROUGH_INTENDED;
Nicolas Geoffray8d34a182020-09-16 09:46:58 +01005487 default: {
5488 LoadMethod(invoke->GetMethodLoadKind(), callee_method, invoke);
5489 }
Vladimir Marko58155012015-08-19 12:49:41 +00005490 }
5491
5492 switch (invoke->GetCodePtrLocation()) {
Nicolas Geoffray6d69b522020-09-23 14:47:28 +01005493 case CodePtrLocation::kCallSelf:
Nicolas Geoffray282795c2021-09-24 18:16:41 +01005494 DCHECK(!GetGraph()->HasShouldDeoptimizeFlag());
Vladimir Marko58155012015-08-19 12:49:41 +00005495 __ call(GetFrameEntryLabel());
Vladimir Marko86c87522020-05-11 16:55:55 +01005496 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Vladimir Marko58155012015-08-19 12:49:41 +00005497 break;
Nicolas Geoffray6d69b522020-09-23 14:47:28 +01005498 case CodePtrLocation::kCallCriticalNative: {
Vladimir Marko86c87522020-05-11 16:55:55 +01005499 size_t out_frame_size =
5500 PrepareCriticalNativeCall<CriticalNativeCallingConventionVisitorX86,
5501 kNativeStackAlignment,
Vladimir Markodec78172020-06-19 15:31:23 +01005502 GetCriticalNativeDirectCallFrameSize>(invoke);
Vladimir Markoeb9eb002020-10-02 13:54:19 +01005503 if (invoke->GetMethodLoadKind() == MethodLoadKind::kBootImageLinkTimePcRelative) {
5504 DCHECK(GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension());
5505 Register base_reg = GetInvokeExtraParameter(invoke, temp.AsRegister<Register>());
5506 __ call(Address(base_reg, CodeGeneratorX86::kPlaceholder32BitOffset));
5507 RecordBootImageJniEntrypointPatch(invoke);
5508 } else {
5509 // (callee_method + offset_of_jni_entry_point)()
5510 __ call(Address(callee_method.AsRegister<Register>(),
5511 ArtMethod::EntryPointFromJniOffset(kX86PointerSize).Int32Value()));
5512 }
Vladimir Marko86c87522020-05-11 16:55:55 +01005513 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
5514 if (out_frame_size == 0u && DataType::IsFloatingPointType(invoke->GetType())) {
5515 // Create space for conversion.
5516 out_frame_size = 8u;
Vladimir Markodec78172020-06-19 15:31:23 +01005517 IncreaseFrame(out_frame_size);
Vladimir Marko86c87522020-05-11 16:55:55 +01005518 }
5519 // Zero-/sign-extend or move the result when needed due to native and managed ABI mismatch.
5520 switch (invoke->GetType()) {
5521 case DataType::Type::kBool:
5522 __ movzxb(EAX, AL);
5523 break;
5524 case DataType::Type::kInt8:
5525 __ movsxb(EAX, AL);
5526 break;
5527 case DataType::Type::kUint16:
5528 __ movzxw(EAX, EAX);
5529 break;
5530 case DataType::Type::kInt16:
5531 __ movsxw(EAX, EAX);
5532 break;
5533 case DataType::Type::kFloat32:
5534 __ fstps(Address(ESP, 0));
5535 __ movss(XMM0, Address(ESP, 0));
5536 break;
5537 case DataType::Type::kFloat64:
5538 __ fstpl(Address(ESP, 0));
5539 __ movsd(XMM0, Address(ESP, 0));
5540 break;
5541 case DataType::Type::kInt32:
5542 case DataType::Type::kInt64:
5543 case DataType::Type::kVoid:
5544 break;
5545 default:
5546 DCHECK(false) << invoke->GetType();
5547 break;
5548 }
5549 if (out_frame_size != 0u) {
Vladimir Markodec78172020-06-19 15:31:23 +01005550 DecreaseFrame(out_frame_size);
Vladimir Marko86c87522020-05-11 16:55:55 +01005551 }
5552 break;
5553 }
Nicolas Geoffray6d69b522020-09-23 14:47:28 +01005554 case CodePtrLocation::kCallArtMethod:
Vladimir Marko58155012015-08-19 12:49:41 +00005555 // (callee_method + offset_of_quick_compiled_code)()
5556 __ call(Address(callee_method.AsRegister<Register>(),
5557 ArtMethod::EntryPointFromQuickCompiledCodeOffset(
Andreas Gampe542451c2016-07-26 09:02:02 -07005558 kX86PointerSize).Int32Value()));
Vladimir Marko86c87522020-05-11 16:55:55 +01005559 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Vladimir Marko58155012015-08-19 12:49:41 +00005560 break;
Mark Mendell09ed1a32015-03-25 08:30:06 -04005561 }
5562
5563 DCHECK(!IsLeafMethod());
Mark Mendell09ed1a32015-03-25 08:30:06 -04005564}
5565
Vladimir Markoe7197bf2017-06-02 17:00:23 +01005566void CodeGeneratorX86::GenerateVirtualCall(
5567 HInvokeVirtual* invoke, Location temp_in, SlowPathCode* slow_path) {
Andreas Gampebfb5ba92015-09-01 15:45:02 +00005568 Register temp = temp_in.AsRegister<Register>();
5569 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
5570 invoke->GetVTableIndex(), kX86PointerSize).Uint32Value();
Nicolas Geoffraye5234232015-12-02 09:06:11 +00005571
5572 // Use the calling convention instead of the location of the receiver, as
5573 // intrinsics may have put the receiver in a different register. In the intrinsics
5574 // slow path, the arguments have been moved to the right place, so here we are
5575 // guaranteed that the receiver is the first register of the calling convention.
5576 InvokeDexCallingConvention calling_convention;
5577 Register receiver = calling_convention.GetRegisterAt(0);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00005578 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005579 // /* HeapReference<Class> */ temp = receiver->klass_
Nicolas Geoffraye5234232015-12-02 09:06:11 +00005580 __ movl(temp, Address(receiver, class_offset));
Andreas Gampebfb5ba92015-09-01 15:45:02 +00005581 MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005582 // Instead of simply (possibly) unpoisoning `temp` here, we should
5583 // emit a read barrier for the previous class reference load.
5584 // However this is not required in practice, as this is an
5585 // intermediate/temporary reference and because the current
5586 // concurrent copying collector keeps the from-space memory
5587 // intact/accessible until the end of the marking phase (the
5588 // concurrent copying collector may not in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00005589 __ MaybeUnpoisonHeapReference(temp);
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +00005590
5591 MaybeGenerateInlineCacheCheck(invoke, temp);
5592
Andreas Gampebfb5ba92015-09-01 15:45:02 +00005593 // temp = temp->GetMethodAt(method_offset);
5594 __ movl(temp, Address(temp, method_offset));
5595 // call temp->GetEntryPoint();
5596 __ call(Address(
Andreas Gampe542451c2016-07-26 09:02:02 -07005597 temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86PointerSize).Int32Value()));
Vladimir Markoe7197bf2017-06-02 17:00:23 +01005598 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00005599}
5600
Vladimir Marko6fd16062018-06-26 11:02:04 +01005601void CodeGeneratorX86::RecordBootImageIntrinsicPatch(HX86ComputeBaseMethodAddress* method_address,
5602 uint32_t intrinsic_data) {
Vladimir Marko2d06e022019-07-08 15:45:19 +01005603 boot_image_other_patches_.emplace_back(
Andreas Gampe3db70682018-12-26 15:12:03 -08005604 method_address, /* target_dex_file= */ nullptr, intrinsic_data);
Vladimir Marko2d06e022019-07-08 15:45:19 +01005605 __ Bind(&boot_image_other_patches_.back().label);
Vladimir Marko6fd16062018-06-26 11:02:04 +01005606}
5607
Vladimir Markob066d432018-01-03 13:14:37 +00005608void CodeGeneratorX86::RecordBootImageRelRoPatch(HX86ComputeBaseMethodAddress* method_address,
5609 uint32_t boot_image_offset) {
Vladimir Marko2d06e022019-07-08 15:45:19 +01005610 boot_image_other_patches_.emplace_back(
Andreas Gampe3db70682018-12-26 15:12:03 -08005611 method_address, /* target_dex_file= */ nullptr, boot_image_offset);
Vladimir Marko2d06e022019-07-08 15:45:19 +01005612 __ Bind(&boot_image_other_patches_.back().label);
Vladimir Markob066d432018-01-03 13:14:37 +00005613}
5614
Nicolas Geoffray8d34a182020-09-16 09:46:58 +01005615void CodeGeneratorX86::RecordBootImageMethodPatch(HInvoke* invoke) {
5616 size_t index = invoke->IsInvokeInterface()
Vladimir Markocde64972023-04-25 16:40:06 +00005617 ? invoke->AsInvokeInterface()->GetSpecialInputIndex()
5618 : invoke->AsInvokeStaticOrDirect()->GetSpecialInputIndex();
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005619 HX86ComputeBaseMethodAddress* method_address =
Vladimir Markocde64972023-04-25 16:40:06 +00005620 invoke->InputAt(index)->AsX86ComputeBaseMethodAddress();
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005621 boot_image_method_patches_.emplace_back(
Nicolas Geoffraye6c0f2a2020-09-07 08:30:52 +01005622 method_address,
5623 invoke->GetResolvedMethodReference().dex_file,
5624 invoke->GetResolvedMethodReference().index);
Vladimir Marko65979462017-05-19 17:25:12 +01005625 __ Bind(&boot_image_method_patches_.back().label);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005626}
5627
Nicolas Geoffray8d34a182020-09-16 09:46:58 +01005628void CodeGeneratorX86::RecordMethodBssEntryPatch(HInvoke* invoke) {
5629 size_t index = invoke->IsInvokeInterface()
Vladimir Markocde64972023-04-25 16:40:06 +00005630 ? invoke->AsInvokeInterface()->GetSpecialInputIndex()
5631 : invoke->AsInvokeStaticOrDirect()->GetSpecialInputIndex();
Santiago Aboy Solanesa0232ad2021-11-08 17:00:06 +00005632 DCHECK(IsSameDexFile(GetGraph()->GetDexFile(), *invoke->GetMethodReference().dex_file) ||
Santiago Aboy Solanes69a87e32022-03-08 16:43:54 +00005633 GetCompilerOptions().WithinOatFile(invoke->GetMethodReference().dex_file) ||
5634 ContainsElement(Runtime::Current()->GetClassLinker()->GetBootClassPath(),
5635 invoke->GetMethodReference().dex_file));
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005636 HX86ComputeBaseMethodAddress* method_address =
Vladimir Markocde64972023-04-25 16:40:06 +00005637 invoke->InputAt(index)->AsX86ComputeBaseMethodAddress();
Vladimir Marko0eb882b2017-05-15 13:39:18 +01005638 // Add the patch entry and bind its label at the end of the instruction.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005639 method_bss_entry_patches_.emplace_back(
Nicolas Geoffraye6c0f2a2020-09-07 08:30:52 +01005640 method_address,
5641 invoke->GetMethodReference().dex_file,
5642 invoke->GetMethodReference().index);
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005643 __ Bind(&method_bss_entry_patches_.back().label);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01005644}
5645
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005646void CodeGeneratorX86::RecordBootImageTypePatch(HLoadClass* load_class) {
5647 HX86ComputeBaseMethodAddress* method_address =
Vladimir Markocde64972023-04-25 16:40:06 +00005648 load_class->InputAt(0)->AsX86ComputeBaseMethodAddress();
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005649 boot_image_type_patches_.emplace_back(
5650 method_address, &load_class->GetDexFile(), load_class->GetTypeIndex().index_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00005651 __ Bind(&boot_image_type_patches_.back().label);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005652}
5653
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005654Label* CodeGeneratorX86::NewTypeBssEntryPatch(HLoadClass* load_class) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005655 HX86ComputeBaseMethodAddress* method_address =
Vladimir Markocde64972023-04-25 16:40:06 +00005656 load_class->InputAt(0)->AsX86ComputeBaseMethodAddress();
Vladimir Marko8f63f102020-09-28 12:10:28 +01005657 ArenaDeque<X86PcRelativePatchInfo>* patches = nullptr;
5658 switch (load_class->GetLoadKind()) {
5659 case HLoadClass::LoadKind::kBssEntry:
5660 patches = &type_bss_entry_patches_;
5661 break;
5662 case HLoadClass::LoadKind::kBssEntryPublic:
5663 patches = &public_type_bss_entry_patches_;
5664 break;
5665 case HLoadClass::LoadKind::kBssEntryPackage:
5666 patches = &package_type_bss_entry_patches_;
5667 break;
5668 default:
5669 LOG(FATAL) << "Unexpected load kind: " << load_class->GetLoadKind();
5670 UNREACHABLE();
5671 }
5672 patches->emplace_back(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005673 method_address, &load_class->GetDexFile(), load_class->GetTypeIndex().index_);
Vladimir Marko8f63f102020-09-28 12:10:28 +01005674 return &patches->back().label;
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005675}
5676
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005677void CodeGeneratorX86::RecordBootImageStringPatch(HLoadString* load_string) {
5678 HX86ComputeBaseMethodAddress* method_address =
Vladimir Markocde64972023-04-25 16:40:06 +00005679 load_string->InputAt(0)->AsX86ComputeBaseMethodAddress();
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005680 boot_image_string_patches_.emplace_back(
5681 method_address, &load_string->GetDexFile(), load_string->GetStringIndex().index_);
5682 __ Bind(&boot_image_string_patches_.back().label);
Vladimir Marko65979462017-05-19 17:25:12 +01005683}
5684
Vladimir Markoaad75c62016-10-03 08:46:48 +00005685Label* CodeGeneratorX86::NewStringBssEntryPatch(HLoadString* load_string) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005686 HX86ComputeBaseMethodAddress* method_address =
Vladimir Markocde64972023-04-25 16:40:06 +00005687 load_string->InputAt(0)->AsX86ComputeBaseMethodAddress();
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005688 string_bss_entry_patches_.emplace_back(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005689 method_address, &load_string->GetDexFile(), load_string->GetStringIndex().index_);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005690 return &string_bss_entry_patches_.back().label;
Vladimir Markoaad75c62016-10-03 08:46:48 +00005691}
5692
Vladimir Markoeb9eb002020-10-02 13:54:19 +01005693void CodeGeneratorX86::RecordBootImageJniEntrypointPatch(HInvokeStaticOrDirect* invoke) {
5694 HX86ComputeBaseMethodAddress* method_address =
Vladimir Markocde64972023-04-25 16:40:06 +00005695 invoke->InputAt(invoke->GetSpecialInputIndex())->AsX86ComputeBaseMethodAddress();
Vladimir Markoeb9eb002020-10-02 13:54:19 +01005696 boot_image_jni_entrypoint_patches_.emplace_back(
5697 method_address,
5698 invoke->GetResolvedMethodReference().dex_file,
5699 invoke->GetResolvedMethodReference().index);
5700 __ Bind(&boot_image_jni_entrypoint_patches_.back().label);
5701}
5702
Vladimir Markoeebb8212018-06-05 14:57:24 +01005703void CodeGeneratorX86::LoadBootImageAddress(Register reg,
Vladimir Marko6fd16062018-06-26 11:02:04 +01005704 uint32_t boot_image_reference,
Vladimir Markoeebb8212018-06-05 14:57:24 +01005705 HInvokeStaticOrDirect* invoke) {
Vladimir Marko6fd16062018-06-26 11:02:04 +01005706 if (GetCompilerOptions().IsBootImage()) {
Vladimir Marko6fd16062018-06-26 11:02:04 +01005707 HX86ComputeBaseMethodAddress* method_address =
Vladimir Markocde64972023-04-25 16:40:06 +00005708 invoke->InputAt(invoke->GetSpecialInputIndex())->AsX86ComputeBaseMethodAddress();
Vladimir Marko6fd16062018-06-26 11:02:04 +01005709 DCHECK(method_address != nullptr);
5710 Register method_address_reg =
5711 invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex()).AsRegister<Register>();
Vladimir Marko4ef451a2020-07-23 09:54:27 +00005712 __ leal(reg, Address(method_address_reg, CodeGeneratorX86::kPlaceholder32BitOffset));
Vladimir Marko6fd16062018-06-26 11:02:04 +01005713 RecordBootImageIntrinsicPatch(method_address, boot_image_reference);
Vladimir Markoa2da9b92018-10-10 14:21:55 +01005714 } else if (GetCompilerOptions().GetCompilePic()) {
Vladimir Markoeebb8212018-06-05 14:57:24 +01005715 HX86ComputeBaseMethodAddress* method_address =
Vladimir Markocde64972023-04-25 16:40:06 +00005716 invoke->InputAt(invoke->GetSpecialInputIndex())->AsX86ComputeBaseMethodAddress();
Vladimir Markoeebb8212018-06-05 14:57:24 +01005717 DCHECK(method_address != nullptr);
5718 Register method_address_reg =
5719 invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex()).AsRegister<Register>();
Vladimir Marko4ef451a2020-07-23 09:54:27 +00005720 __ movl(reg, Address(method_address_reg, CodeGeneratorX86::kPlaceholder32BitOffset));
Vladimir Marko6fd16062018-06-26 11:02:04 +01005721 RecordBootImageRelRoPatch(method_address, boot_image_reference);
Vladimir Markoeebb8212018-06-05 14:57:24 +01005722 } else {
Vladimir Marko695348f2020-05-19 14:42:02 +01005723 DCHECK(GetCompilerOptions().IsJitCompiler());
Vladimir Markoeebb8212018-06-05 14:57:24 +01005724 gc::Heap* heap = Runtime::Current()->GetHeap();
5725 DCHECK(!heap->GetBootImageSpaces().empty());
Vladimir Marko6fd16062018-06-26 11:02:04 +01005726 const uint8_t* address = heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference;
Vladimir Markoeebb8212018-06-05 14:57:24 +01005727 __ movl(reg, Immediate(dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(address))));
5728 }
5729}
5730
Vladimir Markode91ca92020-10-27 13:41:40 +00005731void CodeGeneratorX86::LoadIntrinsicDeclaringClass(Register reg, HInvokeStaticOrDirect* invoke) {
5732 DCHECK_NE(invoke->GetIntrinsic(), Intrinsics::kNone);
Vladimir Marko6fd16062018-06-26 11:02:04 +01005733 if (GetCompilerOptions().IsBootImage()) {
Vladimir Marko6fd16062018-06-26 11:02:04 +01005734 // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
Vladimir Marko6fd16062018-06-26 11:02:04 +01005735 HX86ComputeBaseMethodAddress* method_address =
Vladimir Markocde64972023-04-25 16:40:06 +00005736 invoke->InputAt(invoke->GetSpecialInputIndex())->AsX86ComputeBaseMethodAddress();
Vladimir Marko6fd16062018-06-26 11:02:04 +01005737 DCHECK(method_address != nullptr);
5738 Register method_address_reg =
5739 invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex()).AsRegister<Register>();
Vladimir Markode91ca92020-10-27 13:41:40 +00005740 __ leal(reg, Address(method_address_reg, CodeGeneratorX86::kPlaceholder32BitOffset));
Nicolas Geoffraye6c0f2a2020-09-07 08:30:52 +01005741 MethodReference target_method = invoke->GetResolvedMethodReference();
Vladimir Marko6fd16062018-06-26 11:02:04 +01005742 dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
5743 boot_image_type_patches_.emplace_back(method_address, target_method.dex_file, type_idx.index_);
5744 __ Bind(&boot_image_type_patches_.back().label);
5745 } else {
Vladimir Markode91ca92020-10-27 13:41:40 +00005746 uint32_t boot_image_offset = GetBootImageOffsetOfIntrinsicDeclaringClass(invoke);
5747 LoadBootImageAddress(reg, boot_image_offset, invoke);
Vladimir Marko6fd16062018-06-26 11:02:04 +01005748 }
Vladimir Marko6fd16062018-06-26 11:02:04 +01005749}
5750
// The label points to the end of the "movl" or another instruction but the literal offset
// for method patch needs to point to the embedded constant which occupies the last 4 bytes.
// Subtracting this adjustment from a bound label position yields the offset of that
// 32-bit immediate within the generated code.
constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
5754
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01005755template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
Vladimir Markoaad75c62016-10-03 08:46:48 +00005756inline void CodeGeneratorX86::EmitPcRelativeLinkerPatches(
Nicolas Geoffray133719e2017-01-22 15:44:39 +00005757 const ArenaDeque<X86PcRelativePatchInfo>& infos,
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01005758 ArenaVector<linker::LinkerPatch>* linker_patches) {
Nicolas Geoffray133719e2017-01-22 15:44:39 +00005759 for (const X86PcRelativePatchInfo& info : infos) {
Vladimir Markoaad75c62016-10-03 08:46:48 +00005760 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005761 linker_patches->push_back(Factory(literal_offset,
5762 info.target_dex_file,
5763 GetMethodAddressOffset(info.method_address),
5764 info.offset_or_index));
Vladimir Markoaad75c62016-10-03 08:46:48 +00005765 }
5766}
5767
// Adapter that lets three-argument patch factories (patch kinds that reference no dex
// file) be used with EmitPcRelativeLinkerPatches, which expects a four-argument factory.
// The dex file argument is checked to be null and dropped.
template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
                                     const DexFile* target_dex_file,
                                     uint32_t pc_insn_offset,
                                     uint32_t boot_image_offset) {
  DCHECK(target_dex_file == nullptr);  // Unused for these patches, should be null.
  return Factory(literal_offset, pc_insn_offset, boot_image_offset);
}
5776
// Gathers all patches recorded while generating code into `linker_patches`.
// The expected total is computed up front so the vector can be reserved once,
// and the final DCHECK verifies that every recorded patch was emitted.
void CodeGeneratorX86::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      boot_image_method_patches_.size() +
      method_bss_entry_patches_.size() +
      boot_image_type_patches_.size() +
      type_bss_entry_patches_.size() +
      public_type_bss_entry_patches_.size() +
      package_type_bss_entry_patches_.size() +
      boot_image_string_patches_.size() +
      string_bss_entry_patches_.size() +
      boot_image_jni_entrypoint_patches_.size() +
      boot_image_other_patches_.size();
  linker_patches->reserve(size);
  // Direct boot image references are only valid when producing the boot image
  // (or an extension of it); otherwise those patch lists must be empty.
  if (GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension()) {
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
        boot_image_method_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
        boot_image_type_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
        boot_image_string_patches_, linker_patches);
  } else {
    DCHECK(boot_image_method_patches_.empty());
    DCHECK(boot_image_type_patches_.empty());
    DCHECK(boot_image_string_patches_.empty());
  }
  // The "other" patches are intrinsic references when compiling the boot image,
  // and boot-image-relative .data.bimg.rel.ro entries otherwise.
  if (GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
        boot_image_other_patches_, linker_patches);
  } else {
    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
        boot_image_other_patches_, linker_patches);
  }
  // .bss entry patches are emitted for all compilation modes.
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
      method_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
      type_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::PublicTypeBssEntryPatch>(
      public_type_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::PackageTypeBssEntryPatch>(
      package_type_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
      string_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeJniEntrypointPatch>(
      boot_image_jni_entrypoint_patches_, linker_patches);
  DCHECK_EQ(size, linker_patches->size());
}
5824
// Marks the GC card table entry for `object` after a reference store, so the
// concurrent/generational GC knows `object` may contain cross-space references.
// `temp` and `card` are scratch registers; `value` is the stored reference.
// When `emit_null_check` is true, the card is not dirtied for null stores.
void CodeGeneratorX86::MarkGCCard(
    Register temp, Register card, Register object, Register value, bool emit_null_check) {
  NearLabel is_null;
  if (emit_null_check) {
    __ testl(value, value);
    __ j(kEqual, &is_null);
  }
  // Load the address of the card table into `card`.
  __ fs()->movl(card, Address::Absolute(Thread::CardTableOffset<kX86PointerSize>().Int32Value()));
  // Calculate the offset (in the card table) of the card corresponding to
  // `object`.
  __ movl(temp, object);
  __ shrl(temp, Immediate(gc::accounting::CardTable::kCardShift));
  // Write the `art::gc::accounting::CardTable::kCardDirty` value into the
  // `object`'s card.
  //
  // Register `card` contains the address of the card table. Note that the card
  // table's base is biased during its creation so that it always starts at an
  // address whose least-significant byte is equal to `kCardDirty` (see
  // art::gc::accounting::CardTable::Create). Therefore the MOVB instruction
  // below writes the `kCardDirty` (byte) value into the `object`'s card
  // (located at `card + object >> kCardShift`).
  //
  // This dual use of the value in register `card` (1. to calculate the location
  // of the card to mark; and 2. to load the `kCardDirty` value) saves a load
  // (no need to explicitly load `kCardDirty` as an immediate value).
  __ movb(Address(temp, card, TIMES_1, 0),
          X86ManagedRegister::FromCpuRegister(card).AsByteRegister());
  if (emit_null_check) {
    __ Bind(&is_null);
  }
}
5857
// Sets up register allocation constraints shared by instance, static and
// predicated field gets. The predicated form carries the default value in
// input 0 and the receiver in input 1; the output must then alias input 0.
void LocationsBuilderX86::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() ||
         instruction->IsStaticFieldGet() ||
         instruction->IsPredicatedInstanceFieldGet());

  bool object_field_get_with_read_barrier =
      gUseReadBarrier && (instruction->GetType() == DataType::Type::kReference);
  bool is_predicated = instruction->IsPredicatedInstanceFieldGet();
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction,
                                                       gUseReadBarrier
                                                           ? LocationSummary::kCallOnSlowPath
                                                           : LocationSummary::kNoCall);
  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  // receiver_input
  locations->SetInAt(is_predicated ? 1 : 0, Location::RequiresRegister());
  if (is_predicated) {
    // Input 0 is the default value; it must live in a register of the same
    // kind as the output so the output can alias it below.
    if (DataType::IsFloatingPointType(instruction->GetType())) {
      locations->SetInAt(0, Location::RequiresFpuRegister());
    } else {
      locations->SetInAt(0, Location::RequiresRegister());
    }
  }
  if (DataType::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(is_predicated ? Location::SameAsFirstInput()
                                    : Location::RequiresFpuRegister());
  } else {
    // The output overlaps in case of long: we don't want the low move
    // to overwrite the object's location. Likewise, in the case of
    // an object field get with read barriers enabled, we do not want
    // the move to overwrite the object's location, as we need it to emit
    // the read barrier.
    locations->SetOut(is_predicated ? Location::SameAsFirstInput() : Location::RequiresRegister(),
                      (object_field_get_with_read_barrier ||
                       instruction->GetType() == DataType::Type::kInt64 ||
                       is_predicated)
                          ? Location::kOutputOverlap
                          : Location::kNoOutputOverlap);
  }

  if (field_info.IsVolatile() && (field_info.GetFieldType() == DataType::Type::kInt64)) {
    // Long values can be loaded atomically into an XMM using movsd.
    // So we use an XMM register as a temp to achieve atomicity (first
    // load the temp into the XMM and then copy the XMM into the
    // output, 32 bits at a time).
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}
5908
// Emits the actual load for a field get whose locations were prepared by
// LocationsBuilderX86::HandleFieldGet. Note: MaybeRecordImplicitNullCheck must
// be called immediately after the instruction that may fault, so the statement
// order here is load-bearing.
void InstructionCodeGeneratorX86::HandleFieldGet(HInstruction* instruction,
                                                 const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() ||
         instruction->IsStaticFieldGet() ||
         instruction->IsPredicatedInstanceFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  // For the predicated form the receiver is input 1 (input 0 is the default value).
  Location base_loc = locations->InAt(instruction->IsPredicatedInstanceFieldGet() ? 1 : 0);
  Register base = base_loc.AsRegister<Register>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
  DataType::Type load_type = instruction->GetType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  if (load_type == DataType::Type::kReference) {
    // /* HeapReference<Object> */ out = *(base + offset)
    if (gUseReadBarrier && kUseBakerReadBarrier) {
      // Note that a potential implicit null check is handled in this
      // CodeGeneratorX86::GenerateFieldLoadWithBakerReadBarrier call.
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, base, offset, /* needs_null_check= */ true);
      if (is_volatile) {
        codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
      }
    } else {
      __ movl(out.AsRegister<Register>(), Address(base, offset));
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      if (is_volatile) {
        codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
      }
      // If read barriers are enabled, emit read barriers other than
      // Baker's using a slow path (and also unpoison the loaded
      // reference, if heap poisoning is enabled).
      codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
    }
  } else {
    // Non-reference types: delegate to the common memory-load helper. A
    // volatile 64-bit load needs the XMM temp reserved by the locations builder.
    Address src(base, offset);
    XmmRegister temp = (load_type == DataType::Type::kInt64 && is_volatile)
        ? locations->GetTemp(0).AsFpuRegister<XmmRegister>()
        : kNoXmmRegister;
    codegen_->LoadFromMemoryNoBarrier(load_type, out, src, instruction, temp, is_volatile);
    if (is_volatile) {
      codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
    }
  }
}
5956
// Sets up register allocation constraints shared by instance and static field
// sets. Byte-sized stores need the value in a byte-addressable register (EAX);
// volatile 64-bit stores need XMM temps so they can be emitted as a single
// instruction; reference stores may need temps for the GC card mark and/or
// heap reference poisoning.
void LocationsBuilderX86::HandleFieldSet(HInstruction* instruction,
                                         const FieldInfo& field_info,
                                         WriteBarrierKind write_barrier_kind) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  bool is_volatile = field_info.IsVolatile();
  DataType::Type field_type = field_info.GetFieldType();
  bool is_byte_type = DataType::Size(field_type) == 1u;

  // The register allocator does not support multiple
  // inputs that die at entry with one in a specific register.
  if (is_byte_type) {
    // Ensure the value is in a byte register.
    locations->SetInAt(1, Location::RegisterLocation(EAX));
  } else if (DataType::IsFloatingPointType(field_type)) {
    if (is_volatile && field_type == DataType::Type::kFloat64) {
      // In order to satisfy the semantics of volatile, this must be a single instruction store.
      locations->SetInAt(1, Location::RequiresFpuRegister());
    } else {
      locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
    }
  } else if (is_volatile && field_type == DataType::Type::kInt64) {
    // In order to satisfy the semantics of volatile, this must be a single instruction store.
    locations->SetInAt(1, Location::RequiresRegister());

    // 64bits value can be atomically written to an address with movsd and an XMM register.
    // We need two XMM registers because there's no easier way to (bit) copy a register pair
    // into a single XMM register (we copy each pair part into the XMMs and then interleave them).
    // NB: We could make the register allocator understand fp_reg <-> core_reg moves but given the
    // isolated cases when we need this it isn't worth adding the extra complexity.
    locations->AddTemp(Location::RequiresFpuRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));

    if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
      if (write_barrier_kind != WriteBarrierKind::kDontEmit) {
        // Temps for the card-mark address computation and the card value.
        locations->AddTemp(Location::RequiresRegister());
        // Ensure the card is in a byte register.
        locations->AddTemp(Location::RegisterLocation(ECX));
      } else if (kPoisonHeapReferences) {
        locations->AddTemp(Location::RequiresRegister());
      }
    }
  }
}
6006
// Low-level field store: writes the value at `value_index` of `instruction`
// to `field_addr`, honoring volatile semantics (barriers before/after) and
// emitting the GC card mark when required. Note: MaybeRecordImplicitNullCheck
// must directly follow the first instruction that dereferences the object, so
// the emission order within each case is load-bearing.
void InstructionCodeGeneratorX86::HandleFieldSet(HInstruction* instruction,
                                                 uint32_t value_index,
                                                 DataType::Type field_type,
                                                 Address field_addr,
                                                 Register base,
                                                 bool is_volatile,
                                                 bool value_can_be_null,
                                                 WriteBarrierKind write_barrier_kind) {
  LocationSummary* locations = instruction->GetLocations();
  Location value = locations->InAt(value_index);
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(value_index));

  if (is_volatile) {
    // Volatile store: order all prior accesses before the store.
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  // Set for multi-instruction stores (64-bit) where the null check is recorded
  // against the first instruction inside the case, not after the switch.
  bool maybe_record_implicit_null_check_done = false;

  switch (field_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8: {
      if (value.IsConstant()) {
        __ movb(field_addr, Immediate(CodeGenerator::GetInt8ValueOf(value.GetConstant())));
      } else {
        __ movb(field_addr, value.AsRegister<ByteRegister>());
      }
      break;
    }

    case DataType::Type::kUint16:
    case DataType::Type::kInt16: {
      if (value.IsConstant()) {
        __ movw(field_addr, Immediate(CodeGenerator::GetInt16ValueOf(value.GetConstant())));
      } else {
        __ movw(field_addr, value.AsRegister<Register>());
      }
      break;
    }

    case DataType::Type::kInt32:
    case DataType::Type::kReference: {
      if (kPoisonHeapReferences && needs_write_barrier) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as the reference does not
        // need poisoning.
        DCHECK_EQ(field_type, DataType::Type::kReference);
        Register temp = locations->GetTemp(0).AsRegister<Register>();
        __ movl(temp, value.AsRegister<Register>());
        __ PoisonHeapReference(temp);
        __ movl(field_addr, temp);
      } else if (value.IsConstant()) {
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movl(field_addr, Immediate(v));
      } else {
        DCHECK(value.IsRegister()) << value;
        __ movl(field_addr, value.AsRegister<Register>());
      }
      break;
    }

    case DataType::Type::kInt64: {
      if (is_volatile) {
        // Single atomic movsd store built from the register pair via two movd
        // and an interleave (temps reserved by the locations builder).
        XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
        XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
        __ movd(temp1, value.AsRegisterPairLow<Register>());
        __ movd(temp2, value.AsRegisterPairHigh<Register>());
        __ punpckldq(temp1, temp2);
        __ movsd(field_addr, temp1);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else if (value.IsConstant()) {
        int64_t v = CodeGenerator::GetInt64ValueOf(value.GetConstant());
        __ movl(field_addr, Immediate(Low32Bits(v)));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ movl(Address::displace(field_addr, kX86WordSize), Immediate(High32Bits(v)));
      } else {
        __ movl(field_addr, value.AsRegisterPairLow<Register>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ movl(Address::displace(field_addr, kX86WordSize), value.AsRegisterPairHigh<Register>());
      }
      maybe_record_implicit_null_check_done = true;
      break;
    }

    case DataType::Type::kFloat32: {
      if (value.IsConstant()) {
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movl(field_addr, Immediate(v));
      } else {
        __ movss(field_addr, value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case DataType::Type::kFloat64: {
      if (value.IsConstant()) {
        // A two-instruction constant store cannot be atomic, hence not volatile.
        DCHECK(!is_volatile);
        int64_t v = CodeGenerator::GetInt64ValueOf(value.GetConstant());
        __ movl(field_addr, Immediate(Low32Bits(v)));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ movl(Address::displace(field_addr, kX86WordSize), Immediate(High32Bits(v)));
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movsd(field_addr, value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  if (!maybe_record_implicit_null_check_done) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (needs_write_barrier && write_barrier_kind != WriteBarrierKind::kDontEmit) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    codegen_->MarkGCCard(
        temp,
        card,
        base,
        value.AsRegister<Register>(),
        value_can_be_null && write_barrier_kind == WriteBarrierKind::kEmitWithNullCheck);
  }

  if (is_volatile) {
    // Volatile store: order the store before all subsequent accesses.
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
6142
// Field-info based entry point for field stores: computes the field address
// from the object register and offset, handles the predicated-set null guard
// (skip the store when the receiver is null), and delegates the actual store
// to the value_index overload above.
void InstructionCodeGeneratorX86::HandleFieldSet(HInstruction* instruction,
                                                 const FieldInfo& field_info,
                                                 bool value_can_be_null,
                                                 WriteBarrierKind write_barrier_kind) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  Register base = locations->InAt(0).AsRegister<Register>();
  bool is_volatile = field_info.IsVolatile();
  DataType::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  bool is_predicated =
      instruction->IsInstanceFieldSet() && instruction->AsInstanceFieldSet()->GetIsPredicatedSet();

  Address field_addr(base, offset);

  // For a predicated set, a null receiver means "do nothing": branch over the store.
  NearLabel pred_is_null;
  if (is_predicated) {
    __ testl(base, base);
    __ j(kEqual, &pred_is_null);
  }

  HandleFieldSet(instruction,
                 /* value_index= */ 1,
                 field_type,
                 field_addr,
                 base,
                 is_volatile,
                 value_can_be_null,
                 write_barrier_kind);

  if (is_predicated) {
    __ Bind(&pred_is_null);
  }
}
6178
// Static field get shares its location constraints with instance field gets.
void LocationsBuilderX86::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
6182
// Static field get shares its code generation with instance field gets.
void InstructionCodeGeneratorX86::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
6186
// Static field set shares its location constraints with instance field sets.
void LocationsBuilderX86::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetWriteBarrierKind());
}
6190
6191void InstructionCodeGeneratorX86::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Santiago Aboy Solanesd4229602023-01-03 16:20:50 +00006192 HandleFieldSet(instruction,
6193 instruction->GetFieldInfo(),
6194 instruction->GetValueCanBeNull(),
6195 instruction->GetWriteBarrierKind());
Calin Juravle52c48962014-12-16 17:02:57 +00006196}
6197
// Instance field set: delegate to the shared field-set location builder.
void LocationsBuilderX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetWriteBarrierKind());
}
6201
// Instance field set: delegate to the shared field-set code generator.
void InstructionCodeGeneratorX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction,
                 instruction->GetFieldInfo(),
                 instruction->GetValueCanBeNull(),
                 instruction->GetWriteBarrierKind());
}
6208
// Predicated instance field get uses the shared field-get location builder,
// which handles the extra default-value input for the predicated form.
void LocationsBuilderX86::VisitPredicatedInstanceFieldGet(
    HPredicatedInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
6213
// Instance field get: delegate to the shared field-get location builder.
void LocationsBuilderX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
6217
// Predicated instance field get: if the receiver (input 1) is null, skip the
// load entirely — the output then keeps the default value it aliases (input 0,
// as arranged by the locations builder).
void InstructionCodeGeneratorX86::VisitPredicatedInstanceFieldGet(
    HPredicatedInstanceFieldGet* instruction) {
  NearLabel finish;
  LocationSummary* locations = instruction->GetLocations();
  Register recv = locations->InAt(1).AsRegister<Register>();
  __ testl(recv, recv);
  __ j(kZero, &finish);
  HandleFieldGet(instruction, instruction->GetFieldInfo());
  __ Bind(&finish);
}
// Instance field get: delegate to the shared field-get code generator.
void InstructionCodeGeneratorX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
6231
Vladimir Marko552a1342017-10-31 10:56:47 +00006232void LocationsBuilderX86::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
6233 codegen_->CreateStringBuilderAppendLocations(instruction, Location::RegisterLocation(EAX));
6234}
6235
6236void InstructionCodeGeneratorX86::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
6237 __ movl(EAX, Immediate(instruction->GetFormat()->GetValue()));
6238 codegen_->InvokeRuntime(kQuickStringBuilderAppend, instruction, instruction->GetDexPc());
6239}
6240
Calin Juravlee460d1d2015-09-29 04:52:17 +01006241void LocationsBuilderX86::VisitUnresolvedInstanceFieldGet(
6242 HUnresolvedInstanceFieldGet* instruction) {
6243 FieldAccessCallingConventionX86 calling_convention;
6244 codegen_->CreateUnresolvedFieldLocationSummary(
6245 instruction, instruction->GetFieldType(), calling_convention);
6246}
6247
6248void InstructionCodeGeneratorX86::VisitUnresolvedInstanceFieldGet(
6249 HUnresolvedInstanceFieldGet* instruction) {
6250 FieldAccessCallingConventionX86 calling_convention;
6251 codegen_->GenerateUnresolvedFieldAccess(instruction,
6252 instruction->GetFieldType(),
6253 instruction->GetFieldIndex(),
6254 instruction->GetDexPc(),
6255 calling_convention);
6256}
6257
6258void LocationsBuilderX86::VisitUnresolvedInstanceFieldSet(
6259 HUnresolvedInstanceFieldSet* instruction) {
6260 FieldAccessCallingConventionX86 calling_convention;
6261 codegen_->CreateUnresolvedFieldLocationSummary(
6262 instruction, instruction->GetFieldType(), calling_convention);
6263}
6264
6265void InstructionCodeGeneratorX86::VisitUnresolvedInstanceFieldSet(
6266 HUnresolvedInstanceFieldSet* instruction) {
6267 FieldAccessCallingConventionX86 calling_convention;
6268 codegen_->GenerateUnresolvedFieldAccess(instruction,
6269 instruction->GetFieldType(),
6270 instruction->GetFieldIndex(),
6271 instruction->GetDexPc(),
6272 calling_convention);
6273}
6274
6275void LocationsBuilderX86::VisitUnresolvedStaticFieldGet(
6276 HUnresolvedStaticFieldGet* instruction) {
6277 FieldAccessCallingConventionX86 calling_convention;
6278 codegen_->CreateUnresolvedFieldLocationSummary(
6279 instruction, instruction->GetFieldType(), calling_convention);
6280}
6281
6282void InstructionCodeGeneratorX86::VisitUnresolvedStaticFieldGet(
6283 HUnresolvedStaticFieldGet* instruction) {
6284 FieldAccessCallingConventionX86 calling_convention;
6285 codegen_->GenerateUnresolvedFieldAccess(instruction,
6286 instruction->GetFieldType(),
6287 instruction->GetFieldIndex(),
6288 instruction->GetDexPc(),
6289 calling_convention);
6290}
6291
6292void LocationsBuilderX86::VisitUnresolvedStaticFieldSet(
6293 HUnresolvedStaticFieldSet* instruction) {
6294 FieldAccessCallingConventionX86 calling_convention;
6295 codegen_->CreateUnresolvedFieldLocationSummary(
6296 instruction, instruction->GetFieldType(), calling_convention);
6297}
6298
6299void InstructionCodeGeneratorX86::VisitUnresolvedStaticFieldSet(
6300 HUnresolvedStaticFieldSet* instruction) {
6301 FieldAccessCallingConventionX86 calling_convention;
6302 codegen_->GenerateUnresolvedFieldAccess(instruction,
6303 instruction->GetFieldType(),
6304 instruction->GetFieldIndex(),
6305 instruction->GetDexPc(),
6306 calling_convention);
6307}
6308
Nicolas Geoffraye5038322014-07-04 09:41:32 +01006309void LocationsBuilderX86::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01006310 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
6311 Location loc = codegen_->GetCompilerOptions().GetImplicitNullChecks()
6312 ? Location::RequiresRegister()
6313 : Location::Any();
6314 locations->SetInAt(0, loc);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01006315}
6316
Calin Juravle2ae48182016-03-16 14:05:09 +00006317void CodeGeneratorX86::GenerateImplicitNullCheck(HNullCheck* instruction) {
6318 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00006319 return;
6320 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00006321 LocationSummary* locations = instruction->GetLocations();
6322 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00006323
Calin Juravlecd6dffe2015-01-08 17:35:35 +00006324 __ testl(EAX, Address(obj.AsRegister<Register>(), 0));
Calin Juravle2ae48182016-03-16 14:05:09 +00006325 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00006326}
6327
Calin Juravle2ae48182016-03-16 14:05:09 +00006328void CodeGeneratorX86::GenerateExplicitNullCheck(HNullCheck* instruction) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01006329 SlowPathCode* slow_path = new (GetScopedAllocator()) NullCheckSlowPathX86(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00006330 AddSlowPath(slow_path);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01006331
6332 LocationSummary* locations = instruction->GetLocations();
6333 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01006334
6335 if (obj.IsRegister()) {
Mark Mendell42514f62015-03-31 11:34:22 -04006336 __ testl(obj.AsRegister<Register>(), obj.AsRegister<Register>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01006337 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01006338 __ cmpl(Address(ESP, obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01006339 } else {
6340 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00006341 DCHECK(obj.GetConstant()->IsNullConstant());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01006342 __ jmp(slow_path->GetEntryLabel());
6343 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01006344 }
6345 __ j(kEqual, slow_path->GetEntryLabel());
6346}
6347
void InstructionCodeGeneratorX86::VisitNullCheck(HNullCheck* instruction) {
  // Dispatches to GenerateImplicitNullCheck or GenerateExplicitNullCheck
  // depending on the compiler options.
  codegen_->GenerateNullCheck(instruction);
}
6351
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006352void LocationsBuilderX86::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006353 bool object_array_get_with_read_barrier =
Lokesh Gidraca5ed9f2022-04-20 01:39:28 +00006354 gUseReadBarrier && (instruction->GetType() == DataType::Type::kReference);
Nicolas Geoffray39468442014-09-02 15:17:15 +01006355 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006356 new (GetGraph()->GetAllocator()) LocationSummary(instruction,
6357 object_array_get_with_read_barrier
6358 ? LocationSummary::kCallOnSlowPath
6359 : LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01006360 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01006361 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01006362 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01006363 locations->SetInAt(0, Location::RequiresRegister());
6364 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006365 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexandre Rames88c13cd2015-04-14 17:35:39 +01006366 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
6367 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006368 // The output overlaps in case of long: we don't want the low move
6369 // to overwrite the array's location. Likewise, in the case of an
6370 // object array get with read barriers enabled, we do not want the
6371 // move to overwrite the array's location, as we need it to emit
6372 // the read barrier.
6373 locations->SetOut(
6374 Location::RequiresRegister(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006375 (instruction->GetType() == DataType::Type::kInt64 || object_array_get_with_read_barrier)
6376 ? Location::kOutputOverlap
6377 : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01006378 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006379}
6380
// Emits the load of one array element at obj + data_offset + index * scale.
// Also used for String.charAt (see the compressed-string branch below).
void InstructionCodeGeneratorX86::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = obj_loc.AsRegister<Register>();
  Location index = locations->InAt(1);
  Location out_loc = locations->Out();
  uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);

  DataType::Type type = instruction->GetType();
  if (type == DataType::Type::kReference) {
    static_assert(
        sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
        "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
    // /* HeapReference<Object> */ out =
    //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
    if (gUseReadBarrier && kUseBakerReadBarrier) {
      // Note that a potential implicit null check is handled in this
      // CodeGeneratorX86::GenerateArrayLoadWithBakerReadBarrier call.
      codegen_->GenerateArrayLoadWithBakerReadBarrier(
          instruction, out_loc, obj, data_offset, index, /* needs_null_check= */ true);
    } else {
      Register out = out_loc.AsRegister<Register>();
      __ movl(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset));
      // The record call follows the load above, which is the access that may
      // fault on a null array.
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      // If read barriers are enabled, emit read barriers other than
      // Baker's using a slow path (and also unpoison the loaded
      // reference, if heap poisoning is enabled).
      if (index.IsConstant()) {
        uint32_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
      } else {
        codegen_->MaybeGenerateReadBarrierSlow(
            instruction, out_loc, out_loc, obj_loc, data_offset, index);
      }
    }
  } else if (type == DataType::Type::kUint16
      && mirror::kUseStringCompression
      && instruction->IsStringCharAt()) {
    // Branch cases into compressed and uncompressed for each index's type.
    Register out = out_loc.AsRegister<Register>();
    uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
    NearLabel done, not_compressed;
    // Bit 0 of the String count field is the compression flag (see the
    // static_assert below); the testb is also the instruction that may fault,
    // hence the null-check record right after it.
    __ testb(Address(obj, count_offset), Immediate(1));
    codegen_->MaybeRecordImplicitNullCheck(instruction);
    static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                  "Expecting 0=compressed, 1=uncompressed");
    __ j(kNotZero, &not_compressed);
    // Compressed: 8-bit chars, zero-extended to the 16-bit char value.
    __ movzxb(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_1, data_offset));
    __ jmp(&done);
    __ Bind(&not_compressed);
    // Uncompressed: regular 16-bit chars.
    __ movzxw(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_2, data_offset));
    __ Bind(&done);
  } else {
    // All other types: a plain load sized by the element type.
    // NOTE(review): `instruction` is passed presumably so the helper can
    // record an implicit null check for the access — confirm in the helper.
    ScaleFactor scale = CodeGenerator::ScaleFactorForType(type);
    Address src = CodeGeneratorX86::ArrayAddress(obj, index, scale, data_offset);
    codegen_->LoadFromMemoryNoBarrier(type, out_loc, src, instruction);
  }
}
6440
6441void LocationsBuilderX86::VisitArraySet(HArraySet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006442 DataType::Type value_type = instruction->GetComponentType();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006443
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00006444 bool needs_write_barrier =
6445 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Vladimir Marko8fa839c2019-05-16 12:50:47 +00006446 bool needs_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00006447
Vladimir Markoca6fff82017-10-03 14:49:14 +01006448 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
Nicolas Geoffray39468442014-09-02 15:17:15 +01006449 instruction,
Vladimir Marko8fa839c2019-05-16 12:50:47 +00006450 needs_type_check ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall);
Nicolas Geoffray39468442014-09-02 15:17:15 +01006451
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01006452 bool is_byte_type = DataType::Size(value_type) == 1u;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006453 // We need the inputs to be different than the output in case of long operation.
6454 // In case of a byte operation, the register allocator does not support multiple
6455 // inputs that die at entry with one in a specific register.
6456 locations->SetInAt(0, Location::RequiresRegister());
6457 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
6458 if (is_byte_type) {
6459 // Ensure the value is in a byte register.
6460 locations->SetInAt(2, Location::ByteRegisterOrConstant(EAX, instruction->InputAt(2)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006461 } else if (DataType::IsFloatingPointType(value_type)) {
Mark Mendell81489372015-11-04 11:30:41 -05006462 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006463 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006464 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
6465 }
6466 if (needs_write_barrier) {
Santiago Aboy Solanesd4229602023-01-03 16:20:50 +00006467 // Used by reference poisoning or emitting write barrier.
6468 locations->AddTemp(Location::RequiresRegister());
6469 if (instruction->GetWriteBarrierKind() != WriteBarrierKind::kDontEmit) {
6470 // Only used when emitting a write barrier. Ensure the card is in a byte register.
6471 locations->AddTemp(Location::RegisterLocation(ECX));
6472 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006473 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006474}
6475
// Emits the store of one array element. Reference stores may additionally
// need a type check (slow path), a GC card write barrier, and heap-reference
// poisoning; see the kReference case below. For every path, the implicit
// null check is recorded immediately after the first memory access that can
// fault on a null array.
void InstructionCodeGeneratorX86::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location array_loc = locations->InAt(0);
  Register array = array_loc.AsRegister<Register>();
  Location index = locations->InAt(1);
  Location value = locations->InAt(2);
  DataType::Type value_type = instruction->GetComponentType();
  bool needs_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  switch (value_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8: {
      // 8-bit store.
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_1, offset);
      if (value.IsRegister()) {
        __ movb(address, value.AsRegister<ByteRegister>());
      } else {
        __ movb(address, Immediate(CodeGenerator::GetInt8ValueOf(value.GetConstant())));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kUint16:
    case DataType::Type::kInt16: {
      // 16-bit store.
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_2, offset);
      if (value.IsRegister()) {
        __ movw(address, value.AsRegister<Register>());
      } else {
        __ movw(address, Immediate(CodeGenerator::GetInt16ValueOf(value.GetConstant())));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kReference: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_4, offset);

      if (!value.IsRegister()) {
        // Just setting null. No type check, write barrier or poisoning needed.
        DCHECK(instruction->InputAt(2)->IsNullConstant());
        DCHECK(value.IsConstant()) << value;
        __ movl(address, Immediate(0));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        DCHECK(!needs_write_barrier);
        DCHECK(!needs_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      Register register_value = value.AsRegister<Register>();
      Location temp_loc = locations->GetTemp(0);
      Register temp = temp_loc.AsRegister<Register>();

      // A null value never needs the type check or the card mark below: jump
      // straight to the store.
      bool can_value_be_null = instruction->GetValueCanBeNull();
      NearLabel do_store;
      if (can_value_be_null) {
        __ testl(register_value, register_value);
        __ j(kEqual, &do_store);
      }

      SlowPathCode* slow_path = nullptr;
      if (needs_type_check) {
        slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathX86(instruction);
        codegen_->AddSlowPath(slow_path);

        const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
        const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
        const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();

        // Note that when Baker read barriers are enabled, the type
        // checks are performed without read barriers.  This is fine,
        // even in the case where a class object is in the from-space
        // after the flip, as a comparison involving such a type would
        // not produce a false positive; it may of course produce a
        // false negative, in which case we would take the ArraySet
        // slow path.

        // /* HeapReference<Class> */ temp = array->klass_
        __ movl(temp, Address(array, class_offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ MaybeUnpoisonHeapReference(temp);

        // /* HeapReference<Class> */ temp = temp->component_type_
        __ movl(temp, Address(temp, component_offset));
        // If heap poisoning is enabled, no need to unpoison `temp`
        // nor the object reference in `register_value->klass`, as
        // we are comparing two poisoned references.
        __ cmpl(temp, Address(register_value, class_offset));

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          // Fast path: equal classes always pass. Otherwise, a component type
          // whose superclass is null is exactly Object, which also passes.
          NearLabel do_put;
          __ j(kEqual, &do_put);
          // If heap poisoning is enabled, the `temp` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp);

          // If heap poisoning is enabled, no need to unpoison the
          // heap reference loaded below, as it is only used for a
          // comparison with null.
          __ cmpl(Address(temp, super_offset), Immediate(0));
          __ j(kNotEqual, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ j(kNotEqual, slow_path->GetEntryLabel());
        }
      }

      if (instruction->GetWriteBarrierKind() != WriteBarrierKind::kDontEmit) {
        DCHECK_EQ(instruction->GetWriteBarrierKind(), WriteBarrierKind::kEmitNoNullCheck)
            << " Already null checked so we shouldn't do it again.";
        Register card = locations->GetTemp(1).AsRegister<Register>();
        codegen_->MarkGCCard(temp,
                             card,
                             array,
                             value.AsRegister<Register>(),
                             /* emit_null_check= */ false);
      }

      if (can_value_be_null) {
        DCHECK(do_store.IsLinked());
        __ Bind(&do_store);
      }

      // Poison the reference being stored if heap poisoning is enabled; the
      // poisoned copy lives in `temp` so `register_value` stays usable.
      Register source = register_value;
      if (kPoisonHeapReferences) {
        __ movl(temp, register_value);
        __ PoisonHeapReference(temp);
        source = temp;
      }

      __ movl(address, source);

      // If a type check was emitted, the null check was already recorded on
      // the array->klass_ load above; otherwise record it on this store.
      if (can_value_be_null || !needs_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }

      break;
    }

    case DataType::Type::kInt32: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsRegister()) {
        __ movl(address, value.AsRegister<Register>());
      } else {
        DCHECK(value.IsConstant()) << value;
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kInt64: {
      // 64-bit stores are emitted as two 32-bit moves; the null check is
      // recorded on the first (low) half.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      if (value.IsRegisterPair()) {
        __ movl(CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, data_offset),
                value.AsRegisterPairLow<Register>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ movl(CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, data_offset + kX86WordSize),
                value.AsRegisterPairHigh<Register>());
      } else {
        DCHECK(value.IsConstant());
        int64_t val = value.GetConstant()->AsLongConstant()->GetValue();
        __ movl(CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, data_offset),
                Immediate(Low32Bits(val)));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ movl(CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, data_offset + kX86WordSize),
                Immediate(High32Bits(val)));
      }
      break;
    }

    case DataType::Type::kFloat32: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsFpuRegister()) {
        __ movss(address, value.AsFpuRegister<XmmRegister>());
      } else {
        DCHECK(value.IsConstant());
        // Store the float constant as its raw 32-bit pattern.
        int32_t v = bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kFloat64: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsFpuRegister()) {
        __ movsd(address, value.AsFpuRegister<XmmRegister>());
      } else {
        DCHECK(value.IsConstant());
        // Store the double constant as two 32-bit halves of its raw pattern.
        Address address_hi =
            CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, offset + kX86WordSize);
        int64_t v = bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        __ movl(address, Immediate(Low32Bits(v)));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ movl(address_hi, Immediate(High32Bits(v)));
      }
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
6697
6698void LocationsBuilderX86::VisitArrayLength(HArrayLength* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006699 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01006700 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04006701 if (!instruction->IsEmittedAtUseSite()) {
6702 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6703 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006704}
6705
6706void InstructionCodeGeneratorX86::VisitArrayLength(HArrayLength* instruction) {
Mark Mendellee8d9712016-07-12 11:13:15 -04006707 if (instruction->IsEmittedAtUseSite()) {
6708 return;
6709 }
6710
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006711 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01006712 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00006713 Register obj = locations->InAt(0).AsRegister<Register>();
6714 Register out = locations->Out().AsRegister<Register>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006715 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00006716 codegen_->MaybeRecordImplicitNullCheck(instruction);
jessicahandojo4877b792016-09-08 19:49:13 -07006717 // Mask out most significant bit in case the array is String's array of char.
6718 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01006719 __ shrl(out, Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07006720 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006721}
6722
6723void LocationsBuilderX86::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01006724 RegisterSet caller_saves = RegisterSet::Empty();
6725 InvokeRuntimeCallingConvention calling_convention;
6726 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6727 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
6728 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Mark Mendellf60c90b2015-03-04 15:12:59 -05006729 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendellee8d9712016-07-12 11:13:15 -04006730 HInstruction* length = instruction->InputAt(1);
6731 if (!length->IsEmittedAtUseSite()) {
6732 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
6733 }
jessicahandojo4877b792016-09-08 19:49:13 -07006734 // Need register to see array's length.
6735 if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
6736 locations->AddTemp(Location::RequiresRegister());
6737 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006738}
6739
6740void InstructionCodeGeneratorX86::VisitBoundsCheck(HBoundsCheck* instruction) {
jessicahandojo4877b792016-09-08 19:49:13 -07006741 const bool is_string_compressed_char_at =
6742 mirror::kUseStringCompression && instruction->IsStringCharAt();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006743 LocationSummary* locations = instruction->GetLocations();
Mark Mendellf60c90b2015-03-04 15:12:59 -05006744 Location index_loc = locations->InAt(0);
6745 Location length_loc = locations->InAt(1);
Andreas Gampe85b62f22015-09-09 13:15:38 -07006746 SlowPathCode* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01006747 new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathX86(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006748
Mark Mendell99dbd682015-04-22 16:18:52 -04006749 if (length_loc.IsConstant()) {
6750 int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
6751 if (index_loc.IsConstant()) {
6752 // BCE will remove the bounds check if we are guarenteed to pass.
6753 int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
6754 if (index < 0 || index >= length) {
6755 codegen_->AddSlowPath(slow_path);
6756 __ jmp(slow_path->GetEntryLabel());
6757 } else {
6758 // Some optimization after BCE may have generated this, and we should not
6759 // generate a bounds check if it is a valid range.
6760 }
6761 return;
6762 }
6763
6764 // We have to reverse the jump condition because the length is the constant.
6765 Register index_reg = index_loc.AsRegister<Register>();
6766 __ cmpl(index_reg, Immediate(length));
6767 codegen_->AddSlowPath(slow_path);
6768 __ j(kAboveEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05006769 } else {
Mark Mendellee8d9712016-07-12 11:13:15 -04006770 HInstruction* array_length = instruction->InputAt(1);
6771 if (array_length->IsEmittedAtUseSite()) {
6772 // Address the length field in the array.
6773 DCHECK(array_length->IsArrayLength());
Vladimir Markocde64972023-04-25 16:40:06 +00006774 uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
Mark Mendellee8d9712016-07-12 11:13:15 -04006775 Location array_loc = array_length->GetLocations()->InAt(0);
6776 Address array_len(array_loc.AsRegister<Register>(), len_offset);
jessicahandojo4877b792016-09-08 19:49:13 -07006777 if (is_string_compressed_char_at) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01006778 // TODO: if index_loc.IsConstant(), compare twice the index (to compensate for
6779 // the string compression flag) with the in-memory length and avoid the temporary.
jessicahandojo4877b792016-09-08 19:49:13 -07006780 Register length_reg = locations->GetTemp(0).AsRegister<Register>();
6781 __ movl(length_reg, array_len);
6782 codegen_->MaybeRecordImplicitNullCheck(array_length);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01006783 __ shrl(length_reg, Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07006784 codegen_->GenerateIntCompare(length_reg, index_loc);
Mark Mendellee8d9712016-07-12 11:13:15 -04006785 } else {
jessicahandojo4877b792016-09-08 19:49:13 -07006786 // Checking bounds for general case:
6787 // Array of char or string's array with feature compression off.
6788 if (index_loc.IsConstant()) {
6789 int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
6790 __ cmpl(array_len, Immediate(value));
6791 } else {
6792 __ cmpl(array_len, index_loc.AsRegister<Register>());
6793 }
6794 codegen_->MaybeRecordImplicitNullCheck(array_length);
Mark Mendellee8d9712016-07-12 11:13:15 -04006795 }
Mark Mendell99dbd682015-04-22 16:18:52 -04006796 } else {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006797 codegen_->GenerateIntCompare(length_loc, index_loc);
Mark Mendell99dbd682015-04-22 16:18:52 -04006798 }
6799 codegen_->AddSlowPath(slow_path);
6800 __ j(kBelowEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05006801 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006802}
6803
// HParallelMove instructions are never visited by the locations builder:
// they are materialized later (after register allocation), so reaching this
// visitor indicates a compiler bug.
void LocationsBuilderX86::VisitParallelMove([[maybe_unused]] HParallelMove* instruction) {
  LOG(FATAL) << "Unreachable";
}
6807
// Emits the native code for a parallel move via the move resolver. If the
// move immediately precedes a loop suspend check, loop-phi spill slots are
// cleared from the stack map first (the back edge owns the suspend check).
void InstructionCodeGeneratorX86::VisitParallelMove(HParallelMove* instruction) {
  if (instruction->GetNext()->IsSuspendCheck() &&
      instruction->GetBlock()->GetLoopInformation() != nullptr) {
    HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
    // The back edge will generate the suspend check.
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
  }

  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
6818
// A suspend check only calls the runtime on its slow path, so the summary is
// kCallOnSlowPath with a custom caller-save set (see comment below).
void LocationsBuilderX86::VisitSuspendCheck(HSuspendCheck* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnSlowPath);
  // In suspend check slow path, usually there are no caller-save registers at all.
  // If SIMD instructions are present, however, we force spilling all live SIMD
  // registers in full width (since the runtime only saves/restores lower part).
  locations->SetCustomSlowPathCallerSaves(
      GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
}
6828
// Emits a suspend check, unless another instruction is responsible for it:
// loop-header checks are emitted at the back edge, and the entry block's
// check is folded into its trailing goto.
void InstructionCodeGeneratorX86::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}
6842
// Emits the actual suspend-check test. Tests the thread's flag word (via the
// fs segment) against the suspend/checkpoint request bits. When `successor`
// is null the check falls through on the fast path; otherwise the fast path
// jumps straight to `successor` (back-edge form) and the slow path is taken
// via an unconditional jmp. The slow path object is cached on the
// instruction so both edges of a loop share a single slow path.
void InstructionCodeGeneratorX86::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                       HBasicBlock* successor) {
  SuspendCheckSlowPathX86* slow_path =
      down_cast<SuspendCheckSlowPathX86*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    slow_path =
        new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathX86(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
    }
  } else {
    // A cached slow path must have been created for the same successor.
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  __ fs()->testl(Address::Absolute(Thread::ThreadFlagsOffset<kX86PointerSize>().Int32Value()),
                 Immediate(Thread::SuspendOrCheckpointRequestFlags()));
  if (successor == nullptr) {
    // Fall-through form: branch out only when a request flag is set.
    __ j(kNotZero, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    // Back-edge form: fast path jumps to the successor, slow path otherwise.
    __ j(kZero, codegen_->GetLabelOf(successor));
    __ jmp(slow_path->GetEntryLabel());
  }
}
6869
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006870X86Assembler* ParallelMoveResolverX86::GetAssembler() const {
6871 return codegen_->GetAssembler();
6872}
6873
// Copies `number_of_words` 32-bit words from stack offset `src` to `dst`
// through a scratch core register. If the scratch register had to be spilled
// (pushed), ESP moved down one word, so both offsets are compensated by
// kX86WordSize.
void ParallelMoveResolverX86::MoveMemoryToMemory(int dst, int src, int number_of_words) {
  ScratchRegisterScope ensure_scratch(
      this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
  Register temp_reg = static_cast<Register>(ensure_scratch.GetRegister());
  int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;

  // Now that temp register is available (possibly spilled), move blocks of memory.
  for (int i = 0; i < number_of_words; i++) {
    __ movl(temp_reg, Address(ESP, src + stack_offset));
    __ movl(Address(ESP, dst + stack_offset), temp_reg);
    stack_offset += kX86WordSize;
  }
}
6887
// Emits one move of the parallel move at `index`. Dispatches on the
// (source kind, destination kind) pair: core register, register pair,
// FPU register, 32/64-bit/SIMD stack slot, or constant. Stack-pointer
// adjustments are mirrored into the CFI so unwind info stays correct.
void ParallelMoveResolverX86::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    if (destination.IsRegister()) {
      __ movl(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else if (destination.IsFpuRegister()) {
      __ movd(destination.AsFpuRegister<XmmRegister>(), source.AsRegister<Register>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ movl(Address(ESP, destination.GetStackIndex()), source.AsRegister<Register>());
    }
  } else if (source.IsRegisterPair()) {
    if (destination.IsRegisterPair()) {
      // Copy low first; the DCHECK guarantees it does not clobber the
      // source high register before it is read.
      __ movl(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
      DCHECK_NE(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairHigh<Register>());
      __ movl(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
    } else if (destination.IsFpuRegister()) {
      size_t elem_size = DataType::Size(DataType::Type::kInt32);
      // Push the 2 source registers to the stack.
      __ pushl(source.AsRegisterPairHigh<Register>());
      __ cfi().AdjustCFAOffset(elem_size);
      __ pushl(source.AsRegisterPairLow<Register>());
      __ cfi().AdjustCFAOffset(elem_size);
      // Load the destination register.
      __ movsd(destination.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
      // And remove the temporary stack space we allocated.
      codegen_->DecreaseFrame(2 * elem_size);
    } else {
      DCHECK(destination.IsDoubleStackSlot());
      __ movl(Address(ESP, destination.GetStackIndex()), source.AsRegisterPairLow<Register>());
      __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)),
              source.AsRegisterPairHigh<Register>());
    }
  } else if (source.IsFpuRegister()) {
    if (destination.IsRegister()) {
      __ movd(destination.AsRegister<Register>(), source.AsFpuRegister<XmmRegister>());
    } else if (destination.IsFpuRegister()) {
      __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
    } else if (destination.IsRegisterPair()) {
      size_t elem_size = DataType::Size(DataType::Type::kInt32);
      // Create stack space for 2 elements.
      codegen_->IncreaseFrame(2 * elem_size);
      // Store the source register.
      __ movsd(Address(ESP, 0), source.AsFpuRegister<XmmRegister>());
      // And pop the values into destination registers.
      __ popl(destination.AsRegisterPairLow<Register>());
      __ cfi().AdjustCFAOffset(-elem_size);
      __ popl(destination.AsRegisterPairHigh<Register>());
      __ cfi().AdjustCFAOffset(-elem_size);
    } else if (destination.IsStackSlot()) {
      __ movss(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
    } else if (destination.IsDoubleStackSlot()) {
      __ movsd(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
    } else {
      DCHECK(destination.IsSIMDStackSlot());
      __ movups(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
    }
  } else if (source.IsStackSlot()) {
    if (destination.IsRegister()) {
      __ movl(destination.AsRegister<Register>(), Address(ESP, source.GetStackIndex()));
    } else if (destination.IsFpuRegister()) {
      __ movss(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
    } else {
      DCHECK(destination.IsStackSlot());
      MoveMemoryToMemory(destination.GetStackIndex(), source.GetStackIndex(), 1);
    }
  } else if (source.IsDoubleStackSlot()) {
    if (destination.IsRegisterPair()) {
      __ movl(destination.AsRegisterPairLow<Register>(), Address(ESP, source.GetStackIndex()));
      __ movl(destination.AsRegisterPairHigh<Register>(),
              Address(ESP, source.GetHighStackIndex(kX86WordSize)));
    } else if (destination.IsFpuRegister()) {
      __ movsd(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      MoveMemoryToMemory(destination.GetStackIndex(), source.GetStackIndex(), 2);
    }
  } else if (source.IsSIMDStackSlot()) {
    if (destination.IsFpuRegister()) {
      __ movups(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
    } else {
      DCHECK(destination.IsSIMDStackSlot());
      MoveMemoryToMemory(destination.GetStackIndex(), source.GetStackIndex(), 4);
    }
  } else if (source.IsConstant()) {
    HConstant* constant = source.GetConstant();
    if (constant->IsIntConstant() || constant->IsNullConstant()) {
      int32_t value = CodeGenerator::GetInt32ValueOf(constant);
      if (destination.IsRegister()) {
        if (value == 0) {
          // xor is shorter than mov with a zero immediate.
          __ xorl(destination.AsRegister<Register>(), destination.AsRegister<Register>());
        } else {
          __ movl(destination.AsRegister<Register>(), Immediate(value));
        }
      } else {
        DCHECK(destination.IsStackSlot()) << destination;
        __ movl(Address(ESP, destination.GetStackIndex()), Immediate(value));
      }
    } else if (constant->IsFloatConstant()) {
      // Move the raw bit pattern of the float; no FP conversion involved.
      float fp_value = constant->AsFloatConstant()->GetValue();
      int32_t value = bit_cast<int32_t, float>(fp_value);
      Immediate imm(value);
      if (destination.IsFpuRegister()) {
        XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
        if (value == 0) {
          // Easy handling of 0.0.
          __ xorps(dest, dest);
        } else {
          // No direct immediate-to-XMM move; stage through a core scratch.
          ScratchRegisterScope ensure_scratch(
              this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
          Register temp = static_cast<Register>(ensure_scratch.GetRegister());
          __ movl(temp, Immediate(value));
          __ movd(dest, temp);
        }
      } else {
        DCHECK(destination.IsStackSlot()) << destination;
        __ movl(Address(ESP, destination.GetStackIndex()), imm);
      }
    } else if (constant->IsLongConstant()) {
      int64_t value = constant->AsLongConstant()->GetValue();
      int32_t low_value = Low32Bits(value);
      int32_t high_value = High32Bits(value);
      Immediate low(low_value);
      Immediate high(high_value);
      if (destination.IsDoubleStackSlot()) {
        __ movl(Address(ESP, destination.GetStackIndex()), low);
        __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)), high);
      } else {
        __ movl(destination.AsRegisterPairLow<Register>(), low);
        __ movl(destination.AsRegisterPairHigh<Register>(), high);
      }
    } else {
      DCHECK(constant->IsDoubleConstant());
      // As with floats, move the raw 64-bit pattern, split into two words.
      double dbl_value = constant->AsDoubleConstant()->GetValue();
      int64_t value = bit_cast<int64_t, double>(dbl_value);
      int32_t low_value = Low32Bits(value);
      int32_t high_value = High32Bits(value);
      Immediate low(low_value);
      Immediate high(high_value);
      if (destination.IsFpuRegister()) {
        XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
        if (value == 0) {
          // Easy handling of 0.0.
          __ xorpd(dest, dest);
        } else {
          // Materialize the 64-bit pattern on the stack, then load it.
          __ pushl(high);
          __ cfi().AdjustCFAOffset(4);
          __ pushl(low);
          __ cfi().AdjustCFAOffset(4);
          __ movsd(dest, Address(ESP, 0));
          codegen_->DecreaseFrame(8);
        }
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        __ movl(Address(ESP, destination.GetStackIndex()), low);
        __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)), high);
      }
    }
  } else {
    LOG(FATAL) << "Unimplemented move: " << destination << " <- " << source;
  }
}
7053
// Swaps a core register with a 32-bit stack slot at offset `mem`, using a
// scratch register distinct from `reg`. A spilled scratch shifts ESP by one
// word, hence the stack_offset compensation.
void ParallelMoveResolverX86::Exchange(Register reg, int mem) {
  Register suggested_scratch = reg == EAX ? EBX : EAX;
  ScratchRegisterScope ensure_scratch(
      this, reg, suggested_scratch, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
  // scratch = [mem]; [mem] = reg; reg = scratch.
  __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, mem + stack_offset));
  __ movl(Address(ESP, mem + stack_offset), reg);
  __ movl(reg, static_cast<Register>(ensure_scratch.GetRegister()));
}
7064
// Swaps the low 32 bits of an XMM register with a 32-bit stack slot at
// offset `mem`, staging the memory word through a core scratch register.
void ParallelMoveResolverX86::Exchange32(XmmRegister reg, int mem) {
  ScratchRegisterScope ensure_scratch(
      this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());

  Register temp_reg = static_cast<Register>(ensure_scratch.GetRegister());
  int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
  // temp = [mem]; [mem] = low32(reg); low32(reg) = temp.
  __ movl(temp_reg, Address(ESP, mem + stack_offset));
  __ movss(Address(ESP, mem + stack_offset), reg);
  __ movd(reg, temp_reg);
}
7075
// Swaps a full 128-bit XMM register with a SIMD stack slot at offset `mem`.
// Spills the XMM value to four freshly reserved stack words, swaps those
// words with the slot (whose offset moved up by `extra_slot`), then reloads.
void ParallelMoveResolverX86::Exchange128(XmmRegister reg, int mem) {
  size_t extra_slot = 4 * kX86WordSize;
  codegen_->IncreaseFrame(extra_slot);
  __ movups(Address(ESP, 0), XmmRegister(reg));
  ExchangeMemory(0, mem + extra_slot, 4);
  __ movups(XmmRegister(reg), Address(ESP, 0));
  codegen_->DecreaseFrame(extra_slot);
}
7084
// Swaps `number_of_words` 32-bit words between stack offsets `mem1` and
// `mem2` using two core scratch registers. Each spilled scratch pushes ESP
// down one word, so the offsets are compensated accordingly.
void ParallelMoveResolverX86::ExchangeMemory(int mem1, int mem2, int number_of_words) {
  ScratchRegisterScope ensure_scratch1(
      this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());

  // The second scratch must differ from the first.
  Register suggested_scratch = ensure_scratch1.GetRegister() == EAX ? EBX : EAX;
  ScratchRegisterScope ensure_scratch2(
      this, ensure_scratch1.GetRegister(), suggested_scratch, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch1.IsSpilled() ? kX86WordSize : 0;
  stack_offset += ensure_scratch2.IsSpilled() ? kX86WordSize : 0;

  // Now that temp registers are available (possibly spilled), exchange blocks of memory.
  for (int i = 0; i < number_of_words; i++) {
    __ movl(static_cast<Register>(ensure_scratch1.GetRegister()), Address(ESP, mem1 + stack_offset));
    __ movl(static_cast<Register>(ensure_scratch2.GetRegister()), Address(ESP, mem2 + stack_offset));
    __ movl(Address(ESP, mem2 + stack_offset), static_cast<Register>(ensure_scratch1.GetRegister()));
    __ movl(Address(ESP, mem1 + stack_offset), static_cast<Register>(ensure_scratch2.GetRegister()));
    stack_offset += kX86WordSize;
  }
}
7105
// Emits a swap between the two locations of the move at `index`. Register
// pairs are not handled here (no case for them); the supported combinations
// are core/XMM registers and 32/64-bit/SIMD stack slots.
void ParallelMoveResolverX86::EmitSwap(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    // Use XOR swap algorithm to avoid serializing XCHG instruction or using a temporary.
    DCHECK_NE(destination.AsRegister<Register>(), source.AsRegister<Register>());
    __ xorl(destination.AsRegister<Register>(), source.AsRegister<Register>());
    __ xorl(source.AsRegister<Register>(), destination.AsRegister<Register>());
    __ xorl(destination.AsRegister<Register>(), source.AsRegister<Register>());
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange(source.AsRegister<Register>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange(destination.AsRegister<Register>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    ExchangeMemory(destination.GetStackIndex(), source.GetStackIndex(), 1);
  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
    // Use XOR Swap algorithm to avoid a temporary.
    DCHECK_NE(source.reg(), destination.reg());
    __ xorpd(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
    __ xorpd(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
    __ xorpd(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
  } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (destination.IsFpuRegister() && source.IsStackSlot()) {
    Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
    // Take advantage of the 16 bytes in the XMM register.
    XmmRegister reg = source.AsFpuRegister<XmmRegister>();
    Address stack(ESP, destination.GetStackIndex());
    // Load the double into the high doubleword.
    __ movhpd(reg, stack);

    // Store the low double into the destination.
    __ movsd(stack, reg);

    // Move the high double to the low double.
    __ psrldq(reg, Immediate(8));
  } else if (destination.IsFpuRegister() && source.IsDoubleStackSlot()) {
    // Take advantage of the 16 bytes in the XMM register.
    XmmRegister reg = destination.AsFpuRegister<XmmRegister>();
    Address stack(ESP, source.GetStackIndex());
    // Load the double into the high doubleword.
    __ movhpd(reg, stack);

    // Store the low double into the destination.
    __ movsd(stack, reg);

    // Move the high double to the low double.
    __ psrldq(reg, Immediate(8));
  } else if (destination.IsDoubleStackSlot() && source.IsDoubleStackSlot()) {
    ExchangeMemory(destination.GetStackIndex(), source.GetStackIndex(), 2);
  } else if (source.IsSIMDStackSlot() && destination.IsSIMDStackSlot()) {
    ExchangeMemory(destination.GetStackIndex(), source.GetStackIndex(), 4);
  } else if (source.IsFpuRegister() && destination.IsSIMDStackSlot()) {
    Exchange128(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (destination.IsFpuRegister() && source.IsSIMDStackSlot()) {
    Exchange128(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else {
    LOG(FATAL) << "Unimplemented: source: " << source << ", destination: " << destination;
  }
}
7169
7170void ParallelMoveResolverX86::SpillScratch(int reg) {
7171 __ pushl(static_cast<Register>(reg));
7172}
7173
7174void ParallelMoveResolverX86::RestoreScratch(int reg) {
7175 __ popl(static_cast<Register>(reg));
Nicolas Geoffray4e3d23a2014-05-22 18:32:45 +01007176}
7177
// Returns the load kind actually used for an HLoadClass on x86. Every
// desired kind is supported here; the switch only validates, via DCHECKs,
// that PC-relative kinds are used by AOT and the Jit* kinds by JIT.
HLoadClass::LoadKind CodeGeneratorX86::GetSupportedLoadClassKind(
    HLoadClass::LoadKind desired_class_load_kind) {
  switch (desired_class_load_kind) {
    case HLoadClass::LoadKind::kInvalid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
    case HLoadClass::LoadKind::kReferrersClass:
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadClass::LoadKind::kBootImageRelRo:
    case HLoadClass::LoadKind::kBssEntry:
    case HLoadClass::LoadKind::kBssEntryPublic:
    case HLoadClass::LoadKind::kBssEntryPackage:
      // These kinds rely on linker patching and are AOT-only.
      DCHECK(!GetCompilerOptions().IsJitCompiler());
      break;
    case HLoadClass::LoadKind::kJitBootImageAddress:
    case HLoadClass::LoadKind::kJitTableAddress:
      DCHECK(GetCompilerOptions().IsJitCompiler());
      break;
    case HLoadClass::LoadKind::kRuntimeCall:
      break;
  }
  return desired_class_load_kind;
}
7202
// Builds the location summary for an HLoadClass. The kRuntimeCall kind
// delegates to the shared runtime-call summary; all other kinds may need a
// slow path for resolution/initialization and/or a read barrier, which
// dictates the call kind and the custom caller-save sets below.
void LocationsBuilderX86::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    InvokeRuntimeCallingConvention calling_convention;
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
        cls,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Location::RegisterLocation(EAX));
    DCHECK_EQ(calling_convention.GetRegisterAt(0), EAX);
    return;
  }
  // Only the public/package BSS-entry kinds imply an access check.
  DCHECK_EQ(cls->NeedsAccessCheck(),
            load_kind == HLoadClass::LoadKind::kBssEntryPublic ||
                load_kind == HLoadClass::LoadKind::kBssEntryPackage);

  // Boot-image classes are immutably rooted; no read barrier needed for them.
  const bool requires_read_barrier = gUseReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }

  // Referrer's class and PC-relative kinds read the current method / base
  // register from input 0.
  if (load_kind == HLoadClass::LoadKind::kReferrersClass || cls->HasPcRelativeLoadKind()) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
  if (call_kind == LocationSummary::kCallOnSlowPath && cls->HasPcRelativeLoadKind()) {
    if (!gUseReadBarrier || kUseBakerReadBarrier) {
      // Rely on the type resolution and/or initialization to save everything.
      locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
    } else {
      // For non-Baker read barrier we have a temp-clobbering call.
    }
  }
}
7240
// Reserves a JIT root table slot for `handle` and records a patch for the
// (dex file, type index) class reference. Returns the label to bind at the
// instruction that must later be patched with the root's address.
Label* CodeGeneratorX86::NewJitRootClassPatch(const DexFile& dex_file,
                                              dex::TypeIndex type_index,
                                              Handle<mirror::Class> handle) {
  ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
  // Add a patch entry and return the label.
  jit_class_patches_.emplace_back(&dex_file, type_index.index_);
  PatchInfo<Label>* info = &jit_class_patches_.back();
  return &info->label;
}
7250
// Emits code materializing a class reference into the output register,
// dispatching on the load kind selected earlier (boot image, .bss entry,
// JIT root, referrer's class, ...). May attach a slow path for resolution
// and/or class initialization.
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorX86::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    // Fully delegated to the runtime; no inline fast path.
    codegen_->GenerateLoadClassRuntimeCall(cls);
    return;
  }
  // Access checks are implemented only by the public/package .bss entry kinds.
  DCHECK_EQ(cls->NeedsAccessCheck(),
            load_kind == HLoadClass::LoadKind::kBssEntryPublic ||
                load_kind == HLoadClass::LoadKind::kBssEntryPackage);

  LocationSummary* locations = cls->GetLocations();
  Location out_loc = locations->Out();
  Register out = out_loc.AsRegister<Register>();

  bool generate_null_check = false;
  // Boot image objects are never moved by the GC, so no read barrier is needed.
  const ReadBarrierOption read_barrier_option =
      cls->IsInBootImage() ? kWithoutReadBarrier : GetCompilerReadBarrierOption();
  switch (load_kind) {
    case HLoadClass::LoadKind::kReferrersClass: {
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      Register current_method = locations->InAt(0).AsRegister<Register>();
      GenerateGcRootFieldLoad(
          cls,
          out_loc,
          Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()),
          /* fixup_label= */ nullptr,
          read_barrier_option);
      break;
    }
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage() ||
             codegen_->GetCompilerOptions().IsBootImageExtension());
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      // PC-relative address of the class; the 32-bit displacement is a
      // placeholder fixed up at link time via the recorded patch.
      Register method_address = locations->InAt(0).AsRegister<Register>();
      __ leal(out, Address(method_address, CodeGeneratorX86::kPlaceholder32BitOffset));
      codegen_->RecordBootImageTypePatch(cls);
      break;
    }
    case HLoadClass::LoadKind::kBootImageRelRo: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      // Load the boot-image offset from the .data.img.rel.ro section.
      Register method_address = locations->InAt(0).AsRegister<Register>();
      __ movl(out, Address(method_address, CodeGeneratorX86::kPlaceholder32BitOffset));
      codegen_->RecordBootImageRelRoPatch(cls->InputAt(0)->AsX86ComputeBaseMethodAddress(),
                                          CodeGenerator::GetBootImageOffset(cls));
      break;
    }
    case HLoadClass::LoadKind::kBssEntry:
    case HLoadClass::LoadKind::kBssEntryPublic:
    case HLoadClass::LoadKind::kBssEntryPackage: {
      // Load from the type's .bss slot; null means not resolved yet and
      // triggers the slow path below.
      Register method_address = locations->InAt(0).AsRegister<Register>();
      Address address(method_address, CodeGeneratorX86::kPlaceholder32BitOffset);
      Label* fixup_label = codegen_->NewTypeBssEntryPatch(cls);
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
      // No need for memory fence, thanks to the x86 memory model.
      generate_null_check = true;
      break;
    }
    case HLoadClass::LoadKind::kJitBootImageAddress: {
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      // The class address is known at JIT compile time; embed it directly.
      uint32_t address = reinterpret_cast32<uint32_t>(cls->GetClass().Get());
      DCHECK_NE(address, 0u);
      __ movl(out, Immediate(address));
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress: {
      // Load from the JIT root table slot reserved by NewJitRootClassPatch.
      Address address = Address::Absolute(CodeGeneratorX86::kPlaceholder32BitOffset);
      Label* fixup_label = codegen_->NewJitRootClassPatch(
          cls->GetDexFile(), cls->GetTypeIndex(), cls->GetClass());
      // /* GcRoot<mirror::Class> */ out = *address
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
      break;
    }
    case HLoadClass::LoadKind::kRuntimeCall:
    case HLoadClass::LoadKind::kInvalid:
      // kRuntimeCall was handled by the early return above.
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) LoadClassSlowPathX86(cls, cls);
    codegen_->AddSlowPath(slow_path);

    if (generate_null_check) {
      // Unresolved .bss entry: branch to the slow path to resolve the type.
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
    }

    if (cls->MustGenerateClinitCheck()) {
      // Also verifies initialization and binds the slow-path exit label.
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
7350
Orion Hodsondbaa5c72018-05-10 08:22:46 +01007351void LocationsBuilderX86::VisitLoadMethodHandle(HLoadMethodHandle* load) {
7352 InvokeRuntimeCallingConvention calling_convention;
7353 Location location = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
7354 CodeGenerator::CreateLoadMethodHandleRuntimeCallLocationSummary(load, location, location);
7355}
7356
// Emits the runtime call that resolves the MethodHandle constant; the shared
// helper sets up arguments and records the safepoint.
void InstructionCodeGeneratorX86::VisitLoadMethodHandle(HLoadMethodHandle* load) {
  codegen_->GenerateLoadMethodHandleRuntimeCall(load);
}
7360
Orion Hodson18259d72018-04-12 11:18:23 +01007361void LocationsBuilderX86::VisitLoadMethodType(HLoadMethodType* load) {
7362 InvokeRuntimeCallingConvention calling_convention;
7363 Location location = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
7364 CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
7365}
7366
// Emits the runtime call that resolves the MethodType constant; the shared
// helper sets up arguments and records the safepoint.
void InstructionCodeGeneratorX86::VisitLoadMethodType(HLoadMethodType* load) {
  codegen_->GenerateLoadMethodTypeRuntimeCall(load);
}
7370
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01007371void LocationsBuilderX86::VisitClinitCheck(HClinitCheck* check) {
7372 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007373 new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01007374 locations->SetInAt(0, Location::RequiresRegister());
7375 if (check->HasUses()) {
7376 locations->SetOut(Location::SameAsFirstInput());
7377 }
Vladimir Marko3232dbb2018-07-25 15:42:46 +01007378 // Rely on the type initialization to save everything we need.
7379 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01007380}
7381
// Emits the initialization check for an explicit HClinitCheck, attaching a
// slow path that resolves/initializes the class if needed.
void InstructionCodeGeneratorX86::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class to not be null.
  SlowPathCode* slow_path =
      new (codegen_->GetScopedAllocator()) LoadClassSlowPathX86(check->GetLoadClass(), check);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path,
                                   check->GetLocations()->InAt(0).AsRegister<Register>());
}
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01007390
// Compares the class status byte against the "visibly initialized" value and
// jumps to `slow_path` when the class is not yet visibly initialized. Binds
// the slow path's exit label so execution resumes here afterwards.
void InstructionCodeGeneratorX86::GenerateClassInitializationCheck(
    SlowPathCode* slow_path, Register class_reg) {
  // Status values below the threshold mean "not visibly initialized".
  __ cmpb(Address(class_reg, status_byte_offset), Immediate(shifted_visibly_initialized_value));
  __ j(kBelow, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
7397
// Compares the bitstring type-check bits of the class in `temp` against the
// expected path-to-root. Leaves the result in the flags: the caller tests
// for equality (kEqual on match). Clobbers `temp` in the non-16-bit case.
void InstructionCodeGeneratorX86::GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check,
                                                                    Register temp) {
  uint32_t path_to_root = check->GetBitstringPathToRoot();
  uint32_t mask = check->GetBitstringMask();
  DCHECK(IsPowerOfTwo(mask + 1));
  // Number of low bits that participate in the comparison.
  size_t mask_bits = WhichPowerOf2(mask + 1);

  if (mask_bits == 16u) {
    // Compare the bitstring in memory.
    __ cmpw(Address(temp, mirror::Class::StatusOffset()), Immediate(path_to_root));
  } else {
    // /* uint32_t */ temp = temp->status_
    __ movl(temp, Address(temp, mirror::Class::StatusOffset()));
    // Compare the bitstring bits using SUB.
    __ subl(temp, Immediate(path_to_root));
    // Shift out bits that do not contribute to the comparison.
    __ shll(temp, Immediate(32u - mask_bits));
  }
}
7417
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007418HLoadString::LoadKind CodeGeneratorX86::GetSupportedLoadStringKind(
7419 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007420 switch (desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007421 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00007422 case HLoadString::LoadKind::kBootImageRelRo:
Vladimir Markoaad75c62016-10-03 08:46:48 +00007423 case HLoadString::LoadKind::kBssEntry:
Vladimir Marko695348f2020-05-19 14:42:02 +01007424 DCHECK(!GetCompilerOptions().IsJitCompiler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007425 break;
Vladimir Marko8e524ad2018-07-13 10:27:43 +01007426 case HLoadString::LoadKind::kJitBootImageAddress:
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007427 case HLoadString::LoadKind::kJitTableAddress:
Vladimir Marko695348f2020-05-19 14:42:02 +01007428 DCHECK(GetCompilerOptions().IsJitCompiler());
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007429 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007430 case HLoadString::LoadKind::kRuntimeCall:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007431 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007432 }
7433 return desired_string_load_kind;
7434}
7435
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00007436void LocationsBuilderX86::VisitLoadString(HLoadString* load) {
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007437 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Vladimir Markoca6fff82017-10-03 14:49:14 +01007438 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007439 HLoadString::LoadKind load_kind = load->GetLoadKind();
Vladimir Marko94ce9c22016-09-30 14:50:51 +01007440 if (load_kind == HLoadString::LoadKind::kBootImageLinkTimePcRelative ||
Vladimir Markoe47f60c2018-02-21 13:43:28 +00007441 load_kind == HLoadString::LoadKind::kBootImageRelRo ||
Vladimir Markoaad75c62016-10-03 08:46:48 +00007442 load_kind == HLoadString::LoadKind::kBssEntry) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007443 locations->SetInAt(0, Location::RequiresRegister());
7444 }
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007445 if (load_kind == HLoadString::LoadKind::kRuntimeCall) {
Christina Wadsworth175d09b2016-08-31 16:26:01 -07007446 locations->SetOut(Location::RegisterLocation(EAX));
7447 } else {
7448 locations->SetOut(Location::RequiresRegister());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01007449 if (load_kind == HLoadString::LoadKind::kBssEntry) {
Lokesh Gidraca5ed9f2022-04-20 01:39:28 +00007450 if (!gUseReadBarrier || kUseBakerReadBarrier) {
Vladimir Markoea4c1262017-02-06 19:59:33 +00007451 // Rely on the pResolveString to save everything.
Vladimir Marko3232dbb2018-07-25 15:42:46 +01007452 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01007453 } else {
7454 // For non-Baker read barrier we have a temp-clobbering call.
7455 }
7456 }
Christina Wadsworth175d09b2016-08-31 16:26:01 -07007457 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00007458}
7459
Andreas Gampe8a0128a2016-11-28 07:38:35 -08007460Label* CodeGeneratorX86::NewJitRootStringPatch(const DexFile& dex_file,
Vladimir Marko174b2e22017-10-12 13:34:49 +01007461 dex::StringIndex string_index,
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00007462 Handle<mirror::String> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01007463 ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007464 // Add a patch entry and return the label.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00007465 jit_string_patches_.emplace_back(&dex_file, string_index.index_);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007466 PatchInfo<Label>* info = &jit_string_patches_.back();
7467 return &info->label;
7468}
7469
// Emits code materializing a string reference into the output register,
// dispatching on the load kind (boot image, .bss entry, JIT root, or a
// fallback runtime call).
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorX86::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
  LocationSummary* locations = load->GetLocations();
  Location out_loc = locations->Out();
  Register out = out_loc.AsRegister<Register>();

  switch (load->GetLoadKind()) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage() ||
             codegen_->GetCompilerOptions().IsBootImageExtension());
      // PC-relative address of the string; the 32-bit displacement is a
      // placeholder fixed up at link time via the recorded patch.
      Register method_address = locations->InAt(0).AsRegister<Register>();
      __ leal(out, Address(method_address, CodeGeneratorX86::kPlaceholder32BitOffset));
      codegen_->RecordBootImageStringPatch(load);
      return;
    }
    case HLoadString::LoadKind::kBootImageRelRo: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      // Load the boot-image offset from the .data.img.rel.ro section.
      Register method_address = locations->InAt(0).AsRegister<Register>();
      __ movl(out, Address(method_address, CodeGeneratorX86::kPlaceholder32BitOffset));
      codegen_->RecordBootImageRelRoPatch(load->InputAt(0)->AsX86ComputeBaseMethodAddress(),
                                          CodeGenerator::GetBootImageOffset(load));
      return;
    }
    case HLoadString::LoadKind::kBssEntry: {
      // Load from the string's .bss slot; null means not resolved yet and
      // triggers the resolution slow path.
      Register method_address = locations->InAt(0).AsRegister<Register>();
      Address address = Address(method_address, CodeGeneratorX86::kPlaceholder32BitOffset);
      Label* fixup_label = codegen_->NewStringBssEntryPatch(load);
      // /* GcRoot<mirror::String> */ out = *address /* PC-relative */
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, GetCompilerReadBarrierOption());
      // No need for memory fence, thanks to the x86 memory model.
      SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) LoadStringSlowPathX86(load);
      codegen_->AddSlowPath(slow_path);
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
    case HLoadString::LoadKind::kJitBootImageAddress: {
      // The string address is known at JIT compile time; embed it directly.
      uint32_t address = reinterpret_cast32<uint32_t>(load->GetString().Get());
      DCHECK_NE(address, 0u);
      __ movl(out, Immediate(address));
      return;
    }
    case HLoadString::LoadKind::kJitTableAddress: {
      // Load from the JIT root table slot reserved by NewJitRootStringPatch.
      Address address = Address::Absolute(CodeGeneratorX86::kPlaceholder32BitOffset);
      Label* fixup_label = codegen_->NewJitRootStringPatch(
          load->GetDexFile(), load->GetStringIndex(), load->GetString());
      // /* GcRoot<mirror::String> */ out = *address
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, GetCompilerReadBarrierOption());
      return;
    }
    default:
      // kRuntimeCall falls through to the call sequence below.
      break;
  }

  // TODO: Re-add the compiler code to do string dex cache lookup again.
  InvokeRuntimeCallingConvention calling_convention;
  DCHECK_EQ(calling_convention.GetRegisterAt(0), out);
  __ movl(calling_convention.GetRegisterAt(0), Immediate(load->GetStringIndex().index_));
  codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}
7533
// Returns the thread-local (fs-segment) address holding the current thread's
// pending exception.
static Address GetExceptionTlsAddress() {
  return Address::Absolute(Thread::ExceptionOffset<kX86PointerSize>().Int32Value());
}
7537
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00007538void LocationsBuilderX86::VisitLoadException(HLoadException* load) {
7539 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007540 new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00007541 locations->SetOut(Location::RequiresRegister());
7542}
7543
// Reads the pending exception from the thread-local slot into the output.
void InstructionCodeGeneratorX86::VisitLoadException(HLoadException* load) {
  __ fs()->movl(load->GetLocations()->Out().AsRegister<Register>(), GetExceptionTlsAddress());
}
7547
// Clearing the exception has no inputs or outputs; the summary is created
// only so the instruction has locations attached.
void LocationsBuilderX86::VisitClearException(HClearException* clear) {
  new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
}
7551
// Stores null into the thread-local pending-exception slot.
void InstructionCodeGeneratorX86::VisitClearException([[maybe_unused]] HClearException* clear) {
  __ fs()->movl(GetExceptionTlsAddress(), Immediate(0));
}
7555
7556void LocationsBuilderX86::VisitThrow(HThrow* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007557 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
7558 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00007559 InvokeRuntimeCallingConvention calling_convention;
7560 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
7561}
7562
// Delegates exception delivery to the quick runtime entrypoint.
void InstructionCodeGeneratorX86::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
7567
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007568// Temp is used for read barrier.
7569static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
Lokesh Gidraca5ed9f2022-04-20 01:39:28 +00007570 if (gUseReadBarrier &&
Vladimir Marko953437b2016-08-24 08:30:46 +00007571 !kUseBakerReadBarrier &&
7572 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
Roland Levillain7c1559a2015-12-15 10:55:36 +00007573 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007574 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
7575 return 1;
7576 }
7577 return 0;
7578}
7579
Vladimir Marko9f8d3122018-04-06 13:47:59 +01007580// Interface case has 2 temps, one for holding the number of interfaces, one for the current
7581// interface pointer, the current interface is compared in memory.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007582// The other checks have one temp for loading the object's class.
7583static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
Vladimir Markoe619f6c2017-12-12 16:00:01 +00007584 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007585 return 2;
7586 }
7587 return 1 + NumberOfInstanceOfTemps(type_check_kind);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007588}
7589
// Allocates locations for an instanceof check. The call kind depends on the
// check kind and on whether a read barrier forces a slow path; bitstring
// checks take their three extra inputs as constants.
void LocationsBuilderX86::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  bool baker_read_barrier_slow_path = false;
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck: {
      // These kinds only need a slow path when a read barrier is required.
      bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
      call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
      baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
      break;
    }
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      // These kinds always fall back to a runtime slow path.
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
    case TypeCheckKind::kBitstringCheck:
      // Pure inline comparison; never calls out.
      break;
  }

  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
  if (baker_read_barrier_slow_path) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  if (type_check_kind == TypeCheckKind::kBitstringCheck) {
    // Path-to-root, mask and mask-check inputs are compile-time constants.
    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)));
    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)));
    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)));
  } else {
    locations->SetInAt(1, Location::Any());
  }
  // Note that TypeCheckSlowPathX86 uses this "out" register too.
  locations->SetOut(Location::RequiresRegister());
  // When read barriers are enabled, we need a temporary register for some cases.
  locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
}
7631
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00007632void InstructionCodeGeneratorX86::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007633 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00007634 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00007635 Location obj_loc = locations->InAt(0);
7636 Register obj = obj_loc.AsRegister<Register>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00007637 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007638 Location out_loc = locations->Out();
7639 Register out = out_loc.AsRegister<Register>();
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007640 const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
7641 DCHECK_LE(num_temps, 1u);
7642 Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00007643 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007644 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
7645 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
7646 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07007647 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007648 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00007649
7650 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01007651 // Avoid null check if we know obj is not null.
7652 if (instruction->MustDoNullCheck()) {
7653 __ testl(obj, obj);
7654 __ j(kEqual, &zero);
7655 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007656
Roland Levillain7c1559a2015-12-15 10:55:36 +00007657 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007658 case TypeCheckKind::kExactCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00007659 ReadBarrierOption read_barrier_option =
7660 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08007661 // /* HeapReference<Class> */ out = obj->klass_
7662 GenerateReferenceLoadTwoRegisters(instruction,
7663 out_loc,
7664 obj_loc,
7665 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00007666 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007667 if (cls.IsRegister()) {
7668 __ cmpl(out, cls.AsRegister<Register>());
7669 } else {
7670 DCHECK(cls.IsStackSlot()) << cls;
7671 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
7672 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01007673
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007674 // Classes must be equal for the instanceof to succeed.
7675 __ j(kNotEqual, &zero);
7676 __ movl(out, Immediate(1));
7677 __ jmp(&done);
7678 break;
7679 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007680
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007681 case TypeCheckKind::kAbstractClassCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00007682 ReadBarrierOption read_barrier_option =
7683 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08007684 // /* HeapReference<Class> */ out = obj->klass_
7685 GenerateReferenceLoadTwoRegisters(instruction,
7686 out_loc,
7687 obj_loc,
7688 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00007689 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007690 // If the class is abstract, we eagerly fetch the super class of the
7691 // object to avoid doing a comparison we know will fail.
7692 NearLabel loop;
7693 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007694 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007695 GenerateReferenceLoadOneRegister(instruction,
7696 out_loc,
7697 super_offset,
7698 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00007699 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007700 __ testl(out, out);
7701 // If `out` is null, we use it for the result, and jump to `done`.
7702 __ j(kEqual, &done);
7703 if (cls.IsRegister()) {
7704 __ cmpl(out, cls.AsRegister<Register>());
7705 } else {
7706 DCHECK(cls.IsStackSlot()) << cls;
7707 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
7708 }
7709 __ j(kNotEqual, &loop);
7710 __ movl(out, Immediate(1));
7711 if (zero.IsLinked()) {
7712 __ jmp(&done);
7713 }
7714 break;
7715 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007716
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007717 case TypeCheckKind::kClassHierarchyCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00007718 ReadBarrierOption read_barrier_option =
7719 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08007720 // /* HeapReference<Class> */ out = obj->klass_
7721 GenerateReferenceLoadTwoRegisters(instruction,
7722 out_loc,
7723 obj_loc,
7724 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00007725 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007726 // Walk over the class hierarchy to find a match.
7727 NearLabel loop, success;
7728 __ Bind(&loop);
7729 if (cls.IsRegister()) {
7730 __ cmpl(out, cls.AsRegister<Register>());
7731 } else {
7732 DCHECK(cls.IsStackSlot()) << cls;
7733 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
7734 }
7735 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007736 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007737 GenerateReferenceLoadOneRegister(instruction,
7738 out_loc,
7739 super_offset,
7740 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00007741 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007742 __ testl(out, out);
7743 __ j(kNotEqual, &loop);
7744 // If `out` is null, we use it for the result, and jump to `done`.
7745 __ jmp(&done);
7746 __ Bind(&success);
7747 __ movl(out, Immediate(1));
7748 if (zero.IsLinked()) {
7749 __ jmp(&done);
7750 }
7751 break;
7752 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007753
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007754 case TypeCheckKind::kArrayObjectCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00007755 ReadBarrierOption read_barrier_option =
7756 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08007757 // /* HeapReference<Class> */ out = obj->klass_
7758 GenerateReferenceLoadTwoRegisters(instruction,
7759 out_loc,
7760 obj_loc,
7761 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00007762 read_barrier_option);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01007763 // Do an exact check.
7764 NearLabel exact_check;
7765 if (cls.IsRegister()) {
7766 __ cmpl(out, cls.AsRegister<Register>());
7767 } else {
7768 DCHECK(cls.IsStackSlot()) << cls;
7769 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
7770 }
7771 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007772 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00007773 // /* HeapReference<Class> */ out = out->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007774 GenerateReferenceLoadOneRegister(instruction,
7775 out_loc,
7776 component_offset,
7777 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00007778 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007779 __ testl(out, out);
7780 // If `out` is null, we use it for the result, and jump to `done`.
7781 __ j(kEqual, &done);
7782 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
7783 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01007784 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007785 __ movl(out, Immediate(1));
7786 __ jmp(&done);
7787 break;
7788 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007789
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007790 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08007791 // No read barrier since the slow path will retry upon failure.
7792 // /* HeapReference<Class> */ out = obj->klass_
7793 GenerateReferenceLoadTwoRegisters(instruction,
7794 out_loc,
7795 obj_loc,
7796 class_offset,
7797 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007798 if (cls.IsRegister()) {
7799 __ cmpl(out, cls.AsRegister<Register>());
7800 } else {
7801 DCHECK(cls.IsStackSlot()) << cls;
7802 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
7803 }
7804 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01007805 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86(
Andreas Gampe3db70682018-12-26 15:12:03 -08007806 instruction, /* is_fatal= */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007807 codegen_->AddSlowPath(slow_path);
7808 __ j(kNotEqual, slow_path->GetEntryLabel());
7809 __ movl(out, Immediate(1));
7810 if (zero.IsLinked()) {
7811 __ jmp(&done);
7812 }
7813 break;
7814 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007815
Calin Juravle98893e12015-10-02 21:05:03 +01007816 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00007817 case TypeCheckKind::kInterfaceCheck: {
7818 // Note that we indeed only call on slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00007819 // into the slow path for the unresolved and interface check
Roland Levillain0d5a2812015-11-13 10:07:31 +00007820 // cases.
7821 //
7822 // We cannot directly call the InstanceofNonTrivial runtime
7823 // entry point without resorting to a type checking slow path
7824 // here (i.e. by calling InvokeRuntime directly), as it would
7825 // require to assign fixed registers for the inputs of this
7826 // HInstanceOf instruction (following the runtime calling
7827 // convention), which might be cluttered by the potential first
7828 // read barrier emission at the beginning of this method.
Roland Levillain7c1559a2015-12-15 10:55:36 +00007829 //
7830 // TODO: Introduce a new runtime entry point taking the object
7831 // to test (instead of its class) as argument, and let it deal
7832 // with the read barrier issues. This will let us refactor this
7833 // case of the `switch` code as it was previously (with a direct
7834 // call to the runtime not using a type checking slow path).
7835 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00007836 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01007837 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86(
Andreas Gampe3db70682018-12-26 15:12:03 -08007838 instruction, /* is_fatal= */ false);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007839 codegen_->AddSlowPath(slow_path);
7840 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007841 if (zero.IsLinked()) {
7842 __ jmp(&done);
7843 }
7844 break;
7845 }
Vladimir Marko175e7862018-03-27 09:03:13 +00007846
7847 case TypeCheckKind::kBitstringCheck: {
7848 // /* HeapReference<Class> */ temp = obj->klass_
7849 GenerateReferenceLoadTwoRegisters(instruction,
7850 out_loc,
7851 obj_loc,
7852 class_offset,
7853 kWithoutReadBarrier);
7854
7855 GenerateBitstringTypeCheckCompare(instruction, out);
7856 __ j(kNotEqual, &zero);
7857 __ movl(out, Immediate(1));
7858 __ jmp(&done);
7859 break;
7860 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00007861 }
7862
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007863 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01007864 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007865 __ xorl(out, out);
7866 }
7867
7868 if (done.IsLinked()) {
7869 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01007870 }
7871
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00007872 if (slow_path != nullptr) {
7873 __ Bind(slow_path->GetExitLabel());
7874 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00007875}
7876
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00007877void LocationsBuilderX86::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007878 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko87584542017-12-12 17:47:52 +00007879 LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
Vladimir Markoca6fff82017-10-03 14:49:14 +01007880 LocationSummary* locations =
7881 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007882 locations->SetInAt(0, Location::RequiresRegister());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007883 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
7884 // Require a register for the interface check since there is a loop that compares the class to
7885 // a memory address.
7886 locations->SetInAt(1, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00007887 } else if (type_check_kind == TypeCheckKind::kBitstringCheck) {
Vladimir Markof76ca8c2023-04-05 15:24:41 +00007888 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)));
7889 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)));
7890 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)));
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007891 } else {
7892 locations->SetInAt(1, Location::Any());
7893 }
Vladimir Marko9f8d3122018-04-06 13:47:59 +01007894 // Add temps for read barriers and other uses. One is used by TypeCheckSlowPathX86.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007895 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
7896}
7897
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00007898void InstructionCodeGeneratorX86::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007899 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00007900 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00007901 Location obj_loc = locations->InAt(0);
7902 Register obj = obj_loc.AsRegister<Register>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00007903 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007904 Location temp_loc = locations->GetTemp(0);
7905 Register temp = temp_loc.AsRegister<Register>();
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007906 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
7907 DCHECK_GE(num_temps, 1u);
7908 DCHECK_LE(num_temps, 2u);
7909 Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
7910 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
7911 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
7912 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
7913 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
7914 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
7915 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
7916 const uint32_t object_array_data_offset =
7917 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007918
Vladimir Marko87584542017-12-12 17:47:52 +00007919 bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007920 SlowPathCode* type_check_slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01007921 new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86(
7922 instruction, is_type_check_slow_path_fatal);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007923 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007924
Roland Levillain0d5a2812015-11-13 10:07:31 +00007925 NearLabel done;
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01007926 // Avoid null check if we know obj is not null.
7927 if (instruction->MustDoNullCheck()) {
7928 __ testl(obj, obj);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007929 __ j(kEqual, &done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01007930 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007931
Roland Levillain0d5a2812015-11-13 10:07:31 +00007932 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007933 case TypeCheckKind::kExactCheck:
7934 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007935 // /* HeapReference<Class> */ temp = obj->klass_
7936 GenerateReferenceLoadTwoRegisters(instruction,
7937 temp_loc,
7938 obj_loc,
7939 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007940 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007941
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007942 if (cls.IsRegister()) {
7943 __ cmpl(temp, cls.AsRegister<Register>());
7944 } else {
7945 DCHECK(cls.IsStackSlot()) << cls;
7946 __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
7947 }
7948 // Jump to slow path for throwing the exception or doing a
7949 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00007950 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007951 break;
7952 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007953
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007954 case TypeCheckKind::kAbstractClassCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007955 // /* HeapReference<Class> */ temp = obj->klass_
7956 GenerateReferenceLoadTwoRegisters(instruction,
7957 temp_loc,
7958 obj_loc,
7959 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007960 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007961
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007962 // If the class is abstract, we eagerly fetch the super class of the
7963 // object to avoid doing a comparison we know will fail.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007964 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007965 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007966 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007967 GenerateReferenceLoadOneRegister(instruction,
7968 temp_loc,
7969 super_offset,
7970 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007971 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007972
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007973 // If the class reference currently in `temp` is null, jump to the slow path to throw the
7974 // exception.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007975 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007976 __ j(kZero, type_check_slow_path->GetEntryLabel());
Roland Levillain0d5a2812015-11-13 10:07:31 +00007977
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007978 // Otherwise, compare the classes
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007979 if (cls.IsRegister()) {
7980 __ cmpl(temp, cls.AsRegister<Register>());
7981 } else {
7982 DCHECK(cls.IsStackSlot()) << cls;
7983 __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
7984 }
7985 __ j(kNotEqual, &loop);
7986 break;
7987 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007988
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007989 case TypeCheckKind::kClassHierarchyCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007990 // /* HeapReference<Class> */ temp = obj->klass_
7991 GenerateReferenceLoadTwoRegisters(instruction,
7992 temp_loc,
7993 obj_loc,
7994 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007995 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007996
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007997 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01007998 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007999 __ Bind(&loop);
8000 if (cls.IsRegister()) {
8001 __ cmpl(temp, cls.AsRegister<Register>());
8002 } else {
8003 DCHECK(cls.IsStackSlot()) << cls;
8004 __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
8005 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01008006 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00008007
Roland Levillain0d5a2812015-11-13 10:07:31 +00008008 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08008009 GenerateReferenceLoadOneRegister(instruction,
8010 temp_loc,
8011 super_offset,
8012 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08008013 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00008014
8015 // If the class reference currently in `temp` is not null, jump
8016 // back at the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00008017 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08008018 __ j(kNotZero, &loop);
8019 // Otherwise, jump to the slow path to throw the exception.;
Roland Levillain0d5a2812015-11-13 10:07:31 +00008020 __ jmp(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00008021 break;
8022 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00008023
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00008024 case TypeCheckKind::kArrayObjectCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07008025 // /* HeapReference<Class> */ temp = obj->klass_
8026 GenerateReferenceLoadTwoRegisters(instruction,
8027 temp_loc,
8028 obj_loc,
8029 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08008030 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07008031
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01008032 // Do an exact check.
8033 if (cls.IsRegister()) {
8034 __ cmpl(temp, cls.AsRegister<Register>());
8035 } else {
8036 DCHECK(cls.IsStackSlot()) << cls;
8037 __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
8038 }
8039 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00008040
8041 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00008042 // /* HeapReference<Class> */ temp = temp->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08008043 GenerateReferenceLoadOneRegister(instruction,
8044 temp_loc,
8045 component_offset,
8046 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08008047 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00008048
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08008049 // If the component type is null (i.e. the object not an array), jump to the slow path to
8050 // throw the exception. Otherwise proceed with the check.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00008051 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08008052 __ j(kZero, type_check_slow_path->GetEntryLabel());
Roland Levillain0d5a2812015-11-13 10:07:31 +00008053
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00008054 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08008055 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00008056 break;
8057 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00008058
Calin Juravle98893e12015-10-02 21:05:03 +01008059 case TypeCheckKind::kUnresolvedCheck:
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07008060 // We always go into the type check slow path for the unresolved check case.
Roland Levillain0d5a2812015-11-13 10:07:31 +00008061 // We cannot directly call the CheckCast runtime entry point
8062 // without resorting to a type checking slow path here (i.e. by
8063 // calling InvokeRuntime directly), as it would require to
8064 // assign fixed registers for the inputs of this HInstanceOf
8065 // instruction (following the runtime calling convention), which
8066 // might be cluttered by the potential first read barrier
8067 // emission at the beginning of this method.
8068 __ jmp(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00008069 break;
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07008070
8071 case TypeCheckKind::kInterfaceCheck: {
Vladimir Markoe619f6c2017-12-12 16:00:01 +00008072 // Fast path for the interface check. Try to avoid read barriers to improve the fast path.
8073 // We can not get false positives by doing this.
8074 // /* HeapReference<Class> */ temp = obj->klass_
8075 GenerateReferenceLoadTwoRegisters(instruction,
8076 temp_loc,
8077 obj_loc,
8078 class_offset,
8079 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07008080
Vladimir Markoe619f6c2017-12-12 16:00:01 +00008081 // /* HeapReference<Class> */ temp = temp->iftable_
8082 GenerateReferenceLoadTwoRegisters(instruction,
8083 temp_loc,
8084 temp_loc,
8085 iftable_offset,
8086 kWithoutReadBarrier);
8087 // Iftable is never null.
8088 __ movl(maybe_temp2_loc.AsRegister<Register>(), Address(temp, array_length_offset));
8089 // Maybe poison the `cls` for direct comparison with memory.
8090 __ MaybePoisonHeapReference(cls.AsRegister<Register>());
8091 // Loop through the iftable and check if any class matches.
8092 NearLabel start_loop;
8093 __ Bind(&start_loop);
8094 // Need to subtract first to handle the empty array case.
8095 __ subl(maybe_temp2_loc.AsRegister<Register>(), Immediate(2));
8096 __ j(kNegative, type_check_slow_path->GetEntryLabel());
8097 // Go to next interface if the classes do not match.
8098 __ cmpl(cls.AsRegister<Register>(),
8099 CodeGeneratorX86::ArrayAddress(temp,
8100 maybe_temp2_loc,
8101 TIMES_4,
8102 object_array_data_offset));
8103 __ j(kNotEqual, &start_loop);
8104 // If `cls` was poisoned above, unpoison it.
8105 __ MaybeUnpoisonHeapReference(cls.AsRegister<Register>());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07008106 break;
8107 }
Vladimir Marko175e7862018-03-27 09:03:13 +00008108
8109 case TypeCheckKind::kBitstringCheck: {
8110 // /* HeapReference<Class> */ temp = obj->klass_
8111 GenerateReferenceLoadTwoRegisters(instruction,
8112 temp_loc,
8113 obj_loc,
8114 class_offset,
8115 kWithoutReadBarrier);
8116
8117 GenerateBitstringTypeCheckCompare(instruction, temp);
8118 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
8119 break;
8120 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00008121 }
8122 __ Bind(&done);
8123
Roland Levillain0d5a2812015-11-13 10:07:31 +00008124 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00008125}
8126
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00008127void LocationsBuilderX86::VisitMonitorOperation(HMonitorOperation* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01008128 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
8129 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00008130 InvokeRuntimeCallingConvention calling_convention;
8131 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
8132}
8133
8134void InstructionCodeGeneratorX86::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01008135 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject
8136 : kQuickUnlockObject,
Alexandre Rames8158f282015-08-07 10:26:17 +01008137 instruction,
Serban Constantinescuba45db02016-07-12 22:53:02 +01008138 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00008139 if (instruction->IsEnter()) {
8140 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
8141 } else {
8142 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
8143 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00008144}
8145
Shalini Salomi Bodapatidd121f62018-10-26 15:03:53 +05308146void LocationsBuilderX86::VisitX86AndNot(HX86AndNot* instruction) {
8147 DCHECK(codegen_->GetInstructionSetFeatures().HasAVX2());
8148 DCHECK(DataType::IsIntOrLongType(instruction->GetType())) << instruction->GetType();
8149 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
8150 locations->SetInAt(0, Location::RequiresRegister());
8151 locations->SetInAt(1, Location::RequiresRegister());
8152 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
8153}
8154
8155void InstructionCodeGeneratorX86::VisitX86AndNot(HX86AndNot* instruction) {
8156 LocationSummary* locations = instruction->GetLocations();
8157 Location first = locations->InAt(0);
8158 Location second = locations->InAt(1);
8159 Location dest = locations->Out();
8160 if (instruction->GetResultType() == DataType::Type::kInt32) {
8161 __ andn(dest.AsRegister<Register>(),
8162 first.AsRegister<Register>(),
8163 second.AsRegister<Register>());
8164 } else {
8165 DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
8166 __ andn(dest.AsRegisterPairLow<Register>(),
8167 first.AsRegisterPairLow<Register>(),
8168 second.AsRegisterPairLow<Register>());
8169 __ andn(dest.AsRegisterPairHigh<Register>(),
8170 first.AsRegisterPairHigh<Register>(),
8171 second.AsRegisterPairHigh<Register>());
8172 }
8173}
8174
8175void LocationsBuilderX86::VisitX86MaskOrResetLeastSetBit(HX86MaskOrResetLeastSetBit* instruction) {
8176 DCHECK(codegen_->GetInstructionSetFeatures().HasAVX2());
8177 DCHECK(instruction->GetType() == DataType::Type::kInt32) << instruction->GetType();
8178 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
8179 locations->SetInAt(0, Location::RequiresRegister());
8180 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
8181}
8182
8183void InstructionCodeGeneratorX86::VisitX86MaskOrResetLeastSetBit(
8184 HX86MaskOrResetLeastSetBit* instruction) {
8185 LocationSummary* locations = instruction->GetLocations();
8186 Location src = locations->InAt(0);
8187 Location dest = locations->Out();
8188 DCHECK(instruction->GetResultType() == DataType::Type::kInt32);
8189 switch (instruction->GetOpKind()) {
8190 case HInstruction::kAnd:
8191 __ blsr(dest.AsRegister<Register>(), src.AsRegister<Register>());
8192 break;
8193 case HInstruction::kXor:
8194 __ blsmsk(dest.AsRegister<Register>(), src.AsRegister<Register>());
8195 break;
8196 default:
8197 LOG(FATAL) << "Unreachable";
8198 }
8199}
8200
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00008201void LocationsBuilderX86::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
8202void LocationsBuilderX86::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
8203void LocationsBuilderX86::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
8204
8205void LocationsBuilderX86::HandleBitwiseOperation(HBinaryOperation* instruction) {
8206 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01008207 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008208 DCHECK(instruction->GetResultType() == DataType::Type::kInt32
8209 || instruction->GetResultType() == DataType::Type::kInt64);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00008210 locations->SetInAt(0, Location::RequiresRegister());
8211 locations->SetInAt(1, Location::Any());
8212 locations->SetOut(Location::SameAsFirstInput());
8213}
8214
// Bitwise AND: code generation is shared in HandleBitwiseOperation.
void InstructionCodeGeneratorX86::VisitAnd(HAnd* instruction) {
  HandleBitwiseOperation(instruction);
}
8218
// Bitwise OR: code generation is shared in HandleBitwiseOperation.
void InstructionCodeGeneratorX86::VisitOr(HOr* instruction) {
  HandleBitwiseOperation(instruction);
}
8222
// Bitwise XOR: code generation is shared in HandleBitwiseOperation.
void InstructionCodeGeneratorX86::VisitXor(HXor* instruction) {
  HandleBitwiseOperation(instruction);
}
8226
// Generates code for And/Or/Xor. The first operand doubles as the destination
// (two-address x86 form); the second operand may be a register (pair), a
// constant, or a stack slot. 64-bit operations are performed separately on
// the low and high register halves.
void InstructionCodeGeneratorX86::HandleBitwiseOperation(HBinaryOperation* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  // The register allocator pinned the output to the first input.
  DCHECK(first.Equals(locations->Out()));

  if (instruction->GetResultType() == DataType::Type::kInt32) {
    if (second.IsRegister()) {
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<Register>(), second.AsRegister<Register>());
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<Register>(), second.AsRegister<Register>());
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<Register>(), second.AsRegister<Register>());
      }
    } else if (second.IsConstant()) {
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<Register>(),
                Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<Register>(),
               Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<Register>(),
                Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
      }
    } else {
      // Stack-slot operand: operate directly on memory.
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
      }
    }
  } else {
    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
    if (second.IsRegisterPair()) {
      if (instruction->IsAnd()) {
        __ andl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
        __ andl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
        __ orl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
        __ xorl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
      }
    } else if (second.IsDoubleStackSlot()) {
      if (instruction->IsAnd()) {
        __ andl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
        __ andl(first.AsRegisterPairHigh<Register>(),
                Address(ESP, second.GetHighStackIndex(kX86WordSize)));
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
        __ orl(first.AsRegisterPairHigh<Register>(),
               Address(ESP, second.GetHighStackIndex(kX86WordSize)));
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
        __ xorl(first.AsRegisterPairHigh<Register>(),
                Address(ESP, second.GetHighStackIndex(kX86WordSize)));
      }
    } else {
      DCHECK(second.IsConstant()) << second;
      int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
      int32_t low_value = Low32Bits(value);
      int32_t high_value = High32Bits(value);
      Immediate low(low_value);
      Immediate high(high_value);
      Register first_low = first.AsRegisterPairLow<Register>();
      Register first_high = first.AsRegisterPairHigh<Register>();
      // Only emit instructions that can change the result: AND with all-ones
      // and OR/XOR with zero are skipped as no-ops, and AND with zero is
      // strength-reduced to a register-clearing XOR.
      if (instruction->IsAnd()) {
        if (low_value == 0) {
          __ xorl(first_low, first_low);
        } else if (low_value != -1) {
          __ andl(first_low, low);
        }
        if (high_value == 0) {
          __ xorl(first_high, first_high);
        } else if (high_value != -1) {
          __ andl(first_high, high);
        }
      } else if (instruction->IsOr()) {
        if (low_value != 0) {
          __ orl(first_low, low);
        }
        if (high_value != 0) {
          __ orl(first_high, high);
        }
      } else {
        DCHECK(instruction->IsXor());
        if (low_value != 0) {
          __ xorl(first_low, low);
        }
        if (high_value != 0) {
          __ xorl(first_high, high);
        }
      }
    }
  }
}
8333
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08008334void InstructionCodeGeneratorX86::GenerateReferenceLoadOneRegister(
8335 HInstruction* instruction,
8336 Location out,
8337 uint32_t offset,
8338 Location maybe_temp,
8339 ReadBarrierOption read_barrier_option) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00008340 Register out_reg = out.AsRegister<Register>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08008341 if (read_barrier_option == kWithReadBarrier) {
Lokesh Gidraca5ed9f2022-04-20 01:39:28 +00008342 CHECK(gUseReadBarrier);
Roland Levillain7c1559a2015-12-15 10:55:36 +00008343 if (kUseBakerReadBarrier) {
8344 // Load with fast path based Baker's read barrier.
8345 // /* HeapReference<Object> */ out = *(out + offset)
8346 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08008347 instruction, out, out_reg, offset, /* needs_null_check= */ false);
Roland Levillain7c1559a2015-12-15 10:55:36 +00008348 } else {
8349 // Load with slow path based read barrier.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00008350 // Save the value of `out` into `maybe_temp` before overwriting it
Roland Levillain7c1559a2015-12-15 10:55:36 +00008351 // in the following move operation, as we will need it for the
8352 // read barrier below.
Vladimir Marko953437b2016-08-24 08:30:46 +00008353 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain95e7ffc2016-01-22 11:57:25 +00008354 __ movl(maybe_temp.AsRegister<Register>(), out_reg);
Roland Levillain7c1559a2015-12-15 10:55:36 +00008355 // /* HeapReference<Object> */ out = *(out + offset)
8356 __ movl(out_reg, Address(out_reg, offset));
Roland Levillain95e7ffc2016-01-22 11:57:25 +00008357 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
Roland Levillain7c1559a2015-12-15 10:55:36 +00008358 }
8359 } else {
8360 // Plain load with no read barrier.
8361 // /* HeapReference<Object> */ out = *(out + offset)
8362 __ movl(out_reg, Address(out_reg, offset));
8363 __ MaybeUnpoisonHeapReference(out_reg);
8364 }
8365}
8366
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08008367void InstructionCodeGeneratorX86::GenerateReferenceLoadTwoRegisters(
8368 HInstruction* instruction,
8369 Location out,
8370 Location obj,
8371 uint32_t offset,
8372 ReadBarrierOption read_barrier_option) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00008373 Register out_reg = out.AsRegister<Register>();
8374 Register obj_reg = obj.AsRegister<Register>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08008375 if (read_barrier_option == kWithReadBarrier) {
Lokesh Gidraca5ed9f2022-04-20 01:39:28 +00008376 CHECK(gUseReadBarrier);
Roland Levillain7c1559a2015-12-15 10:55:36 +00008377 if (kUseBakerReadBarrier) {
8378 // Load with fast path based Baker's read barrier.
8379 // /* HeapReference<Object> */ out = *(obj + offset)
8380 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08008381 instruction, out, obj_reg, offset, /* needs_null_check= */ false);
Roland Levillain7c1559a2015-12-15 10:55:36 +00008382 } else {
8383 // Load with slow path based read barrier.
8384 // /* HeapReference<Object> */ out = *(obj + offset)
8385 __ movl(out_reg, Address(obj_reg, offset));
8386 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
8387 }
8388 } else {
8389 // Plain load with no read barrier.
8390 // /* HeapReference<Object> */ out = *(obj + offset)
8391 __ movl(out_reg, Address(obj_reg, offset));
8392 __ MaybeUnpoisonHeapReference(out_reg);
8393 }
8394}
8395
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08008396void InstructionCodeGeneratorX86::GenerateGcRootFieldLoad(
8397 HInstruction* instruction,
8398 Location root,
8399 const Address& address,
8400 Label* fixup_label,
8401 ReadBarrierOption read_barrier_option) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00008402 Register root_reg = root.AsRegister<Register>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08008403 if (read_barrier_option == kWithReadBarrier) {
Lokesh Gidraca5ed9f2022-04-20 01:39:28 +00008404 DCHECK(gUseReadBarrier);
Roland Levillain7c1559a2015-12-15 10:55:36 +00008405 if (kUseBakerReadBarrier) {
8406 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
8407 // Baker's read barrier are used:
8408 //
Roland Levillaind966ce72017-02-09 16:20:14 +00008409 // root = obj.field;
8410 // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
8411 // if (temp != null) {
8412 // root = temp(root)
Roland Levillain7c1559a2015-12-15 10:55:36 +00008413 // }
8414
Vladimir Markocac5a7e2016-02-22 10:39:50 +00008415 // /* GcRoot<mirror::Object> */ root = *address
8416 __ movl(root_reg, address);
8417 if (fixup_label != nullptr) {
8418 __ Bind(fixup_label);
8419 }
Roland Levillain7c1559a2015-12-15 10:55:36 +00008420 static_assert(
8421 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
8422 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
8423 "have different sizes.");
8424 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
8425 "art::mirror::CompressedReference<mirror::Object> and int32_t "
8426 "have different sizes.");
8427
Vladimir Marko953437b2016-08-24 08:30:46 +00008428 // Slow path marking the GC root `root`.
Vladimir Marko174b2e22017-10-12 13:34:49 +01008429 SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) ReadBarrierMarkSlowPathX86(
Andreas Gampe3db70682018-12-26 15:12:03 -08008430 instruction, root, /* unpoison_ref_before_marking= */ false);
Roland Levillain7c1559a2015-12-15 10:55:36 +00008431 codegen_->AddSlowPath(slow_path);
8432
Roland Levillaind966ce72017-02-09 16:20:14 +00008433 // Test the entrypoint (`Thread::Current()->pReadBarrierMarkReg ## root.reg()`).
8434 const int32_t entry_point_offset =
Roland Levillain97c46462017-05-11 14:04:03 +01008435 Thread::ReadBarrierMarkEntryPointsOffset<kX86PointerSize>(root.reg());
Roland Levillaind966ce72017-02-09 16:20:14 +00008436 __ fs()->cmpl(Address::Absolute(entry_point_offset), Immediate(0));
8437 // The entrypoint is null when the GC is not marking.
Roland Levillain7c1559a2015-12-15 10:55:36 +00008438 __ j(kNotEqual, slow_path->GetEntryLabel());
8439 __ Bind(slow_path->GetExitLabel());
8440 } else {
8441 // GC root loaded through a slow path for read barriers other
8442 // than Baker's.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00008443 // /* GcRoot<mirror::Object>* */ root = address
8444 __ leal(root_reg, address);
8445 if (fixup_label != nullptr) {
8446 __ Bind(fixup_label);
8447 }
Roland Levillain7c1559a2015-12-15 10:55:36 +00008448 // /* mirror::Object* */ root = root->Read()
8449 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
8450 }
8451 } else {
8452 // Plain GC root load with no read barrier.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00008453 // /* GcRoot<mirror::Object> */ root = *address
8454 __ movl(root_reg, address);
8455 if (fixup_label != nullptr) {
8456 __ Bind(fixup_label);
8457 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00008458 // Note that GC roots are not affected by heap poisoning, thus we
8459 // do not have to unpoison `root_reg` here.
Roland Levillain7c1559a2015-12-15 10:55:36 +00008460 }
8461}
8462
8463void CodeGeneratorX86::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
8464 Location ref,
8465 Register obj,
8466 uint32_t offset,
Roland Levillain7c1559a2015-12-15 10:55:36 +00008467 bool needs_null_check) {
Lokesh Gidraca5ed9f2022-04-20 01:39:28 +00008468 DCHECK(gUseReadBarrier);
Roland Levillain7c1559a2015-12-15 10:55:36 +00008469 DCHECK(kUseBakerReadBarrier);
8470
8471 // /* HeapReference<Object> */ ref = *(obj + offset)
8472 Address src(obj, offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00008473 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain7c1559a2015-12-15 10:55:36 +00008474}
8475
8476void CodeGeneratorX86::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
8477 Location ref,
8478 Register obj,
8479 uint32_t data_offset,
8480 Location index,
Roland Levillain7c1559a2015-12-15 10:55:36 +00008481 bool needs_null_check) {
Lokesh Gidraca5ed9f2022-04-20 01:39:28 +00008482 DCHECK(gUseReadBarrier);
Roland Levillain7c1559a2015-12-15 10:55:36 +00008483 DCHECK(kUseBakerReadBarrier);
8484
Roland Levillain3d312422016-06-23 13:53:42 +01008485 static_assert(
8486 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
8487 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain7c1559a2015-12-15 10:55:36 +00008488 // /* HeapReference<Object> */ ref =
8489 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01008490 Address src = CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00008491 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain7c1559a2015-12-15 10:55:36 +00008492}
8493
// Emits a reference load from `src` into `ref` guarded by a Baker-style read
// barrier: the holder object's lock word is tested for the gray bit *before*
// the reference load, and a slow path re-marks `ref` when the bit is set.
// When `always_update_field` is true, `temp` must be non-null and the slow
// path additionally writes the marked reference back through `src`.
// The instruction sequence below is order-sensitive: the testb result (EFLAGS)
// must survive until the conditional branch, so the load and the memory
// barrier in between must not touch the flags.
void CodeGeneratorX86::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                 Location ref,
                                                                 Register obj,
                                                                 const Address& src,
                                                                 bool needs_null_check,
                                                                 bool always_update_field,
                                                                 Register* temp) {
  DCHECK(gUseReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::GrayState());
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as:
  // - it implements the load-load fence using a data dependency on
  //   the high-bits of rb_state, which are expected to be all zeroes
  //   (we use CodeGeneratorX86::GenerateMemoryBarrier instead here,
  //   which is a no-op thanks to the x86 memory model);
  // - it performs additional checks that we do not do here for
  //   performance reasons.

  Register ref_reg = ref.AsRegister<Register>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::NonGrayState() == 0, "Expecting non-gray to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  // Byte within the lock word holding the read barrier state bit, and the
  // bit's position within that byte, so a single testb suffices.
  constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
  constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
  constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);

  // if (rb_state == ReadBarrier::GrayState())
  //   ref = ReadBarrier::Mark(ref);
  // At this point, just do the "if" and make sure that flags are preserved until the branch.
  __ testb(Address(obj, monitor_offset + gray_byte_position), Immediate(test_value));
  if (needs_null_check) {
    // The testb above is the first access to the object; it can serve as the
    // implicit null check.
    MaybeRecordImplicitNullCheck(instruction);
  }

  // Load fence to prevent load-load reordering.
  // Note that this is a no-op, thanks to the x86 memory model.
  GenerateMemoryBarrier(MemBarrierKind::kLoadAny);

  // The actual reference load.
  // /* HeapReference<Object> */ ref = *src
  __ movl(ref_reg, src);  // Flags are unaffected.

  // Note: Reference unpoisoning modifies the flags, so we need to delay it after the branch.
  // Slow path marking the object `ref` when it is gray.
  SlowPathCode* slow_path;
  if (always_update_field) {
    DCHECK(temp != nullptr);
    slow_path = new (GetScopedAllocator()) ReadBarrierMarkAndUpdateFieldSlowPathX86(
        instruction, ref, obj, src, /* unpoison_ref_before_marking= */ true, *temp);
  } else {
    slow_path = new (GetScopedAllocator()) ReadBarrierMarkSlowPathX86(
        instruction, ref, /* unpoison_ref_before_marking= */ true);
  }
  AddSlowPath(slow_path);

  // We have done the "if" of the gray bit check above, now branch based on the flags.
  __ j(kNotZero, slow_path->GetEntryLabel());

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  __ Bind(slow_path->GetExitLabel());
}
8575
8576void CodeGeneratorX86::GenerateReadBarrierSlow(HInstruction* instruction,
8577 Location out,
8578 Location ref,
8579 Location obj,
8580 uint32_t offset,
8581 Location index) {
Lokesh Gidraca5ed9f2022-04-20 01:39:28 +00008582 DCHECK(gUseReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00008583
Roland Levillain7c1559a2015-12-15 10:55:36 +00008584 // Insert a slow path based read barrier *after* the reference load.
8585 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00008586 // If heap poisoning is enabled, the unpoisoning of the loaded
8587 // reference will be carried out by the runtime within the slow
8588 // path.
8589 //
8590 // Note that `ref` currently does not get unpoisoned (when heap
8591 // poisoning is enabled), which is alright as the `ref` argument is
8592 // not used by the artReadBarrierSlow entry point.
8593 //
8594 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
Vladimir Marko174b2e22017-10-12 13:34:49 +01008595 SlowPathCode* slow_path = new (GetScopedAllocator())
Roland Levillain0d5a2812015-11-13 10:07:31 +00008596 ReadBarrierForHeapReferenceSlowPathX86(instruction, out, ref, obj, offset, index);
8597 AddSlowPath(slow_path);
8598
Roland Levillain0d5a2812015-11-13 10:07:31 +00008599 __ jmp(slow_path->GetEntryLabel());
8600 __ Bind(slow_path->GetExitLabel());
8601}
8602
Roland Levillain7c1559a2015-12-15 10:55:36 +00008603void CodeGeneratorX86::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
8604 Location out,
8605 Location ref,
8606 Location obj,
8607 uint32_t offset,
8608 Location index) {
Lokesh Gidraca5ed9f2022-04-20 01:39:28 +00008609 if (gUseReadBarrier) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00008610 // Baker's read barriers shall be handled by the fast path
8611 // (CodeGeneratorX86::GenerateReferenceLoadWithBakerReadBarrier).
8612 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00008613 // If heap poisoning is enabled, unpoisoning will be taken care of
8614 // by the runtime within the slow path.
Roland Levillain7c1559a2015-12-15 10:55:36 +00008615 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00008616 } else if (kPoisonHeapReferences) {
8617 __ UnpoisonHeapReference(out.AsRegister<Register>());
8618 }
8619}
8620
Roland Levillain7c1559a2015-12-15 10:55:36 +00008621void CodeGeneratorX86::GenerateReadBarrierForRootSlow(HInstruction* instruction,
8622 Location out,
8623 Location root) {
Lokesh Gidraca5ed9f2022-04-20 01:39:28 +00008624 DCHECK(gUseReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00008625
Roland Levillain7c1559a2015-12-15 10:55:36 +00008626 // Insert a slow path based read barrier *after* the GC root load.
8627 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00008628 // Note that GC roots are not affected by heap poisoning, so we do
8629 // not need to do anything special for this here.
8630 SlowPathCode* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01008631 new (GetScopedAllocator()) ReadBarrierForRootSlowPathX86(instruction, out, root);
Roland Levillain0d5a2812015-11-13 10:07:31 +00008632 AddSlowPath(slow_path);
8633
Roland Levillain0d5a2812015-11-13 10:07:31 +00008634 __ jmp(slow_path->GetEntryLabel());
8635 __ Bind(slow_path->GetExitLabel());
8636}
8637
Stefano Cianciulli78f3c722023-05-16 10:32:54 +00008638void LocationsBuilderX86::VisitBoundType([[maybe_unused]] HBoundType* instruction) {
Calin Juravleb1498f62015-02-16 13:13:29 +00008639 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00008640 LOG(FATAL) << "Unreachable";
8641}
8642
Stefano Cianciulli78f3c722023-05-16 10:32:54 +00008643void InstructionCodeGeneratorX86::VisitBoundType([[maybe_unused]] HBoundType* instruction) {
Calin Juravleb1498f62015-02-16 13:13:29 +00008644 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00008645 LOG(FATAL) << "Unreachable";
8646}
8647
Mark Mendellfe57faa2015-09-18 09:26:15 -04008648// Simple implementation of packed switch - generate cascaded compare/jumps.
8649void LocationsBuilderX86::VisitPackedSwitch(HPackedSwitch* switch_instr) {
8650 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01008651 new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
Mark Mendellfe57faa2015-09-18 09:26:15 -04008652 locations->SetInAt(0, Location::RequiresRegister());
8653}
8654
Vladimir Markof3e0ee22015-12-17 15:23:13 +00008655void InstructionCodeGeneratorX86::GenPackedSwitchWithCompares(Register value_reg,
8656 int32_t lower_bound,
8657 uint32_t num_entries,
8658 HBasicBlock* switch_block,
8659 HBasicBlock* default_block) {
8660 // Figure out the correct compare values and jump conditions.
8661 // Handle the first compare/branch as a special case because it might
8662 // jump to the default case.
8663 DCHECK_GT(num_entries, 2u);
8664 Condition first_condition;
8665 uint32_t index;
8666 const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
8667 if (lower_bound != 0) {
8668 first_condition = kLess;
8669 __ cmpl(value_reg, Immediate(lower_bound));
8670 __ j(first_condition, codegen_->GetLabelOf(default_block));
8671 __ j(kEqual, codegen_->GetLabelOf(successors[0]));
Mark Mendellfe57faa2015-09-18 09:26:15 -04008672
Vladimir Markof3e0ee22015-12-17 15:23:13 +00008673 index = 1;
8674 } else {
8675 // Handle all the compare/jumps below.
8676 first_condition = kBelow;
8677 index = 0;
8678 }
8679
8680 // Handle the rest of the compare/jumps.
8681 for (; index + 1 < num_entries; index += 2) {
8682 int32_t compare_to_value = lower_bound + index + 1;
8683 __ cmpl(value_reg, Immediate(compare_to_value));
8684 // Jump to successors[index] if value < case_value[index].
8685 __ j(first_condition, codegen_->GetLabelOf(successors[index]));
8686 // Jump to successors[index + 1] if value == case_value[index + 1].
8687 __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
8688 }
8689
8690 if (index != num_entries) {
8691 // There are an odd number of entries. Handle the last one.
8692 DCHECK_EQ(index + 1, num_entries);
8693 __ cmpl(value_reg, Immediate(lower_bound + index));
8694 __ j(kEqual, codegen_->GetLabelOf(successors[index]));
Mark Mendellfe57faa2015-09-18 09:26:15 -04008695 }
8696
8697 // And the default for any other value.
Vladimir Markof3e0ee22015-12-17 15:23:13 +00008698 if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
8699 __ jmp(codegen_->GetLabelOf(default_block));
Mark Mendellfe57faa2015-09-18 09:26:15 -04008700 }
8701}
8702
Vladimir Markof3e0ee22015-12-17 15:23:13 +00008703void InstructionCodeGeneratorX86::VisitPackedSwitch(HPackedSwitch* switch_instr) {
8704 int32_t lower_bound = switch_instr->GetStartValue();
8705 uint32_t num_entries = switch_instr->GetNumEntries();
8706 LocationSummary* locations = switch_instr->GetLocations();
8707 Register value_reg = locations->InAt(0).AsRegister<Register>();
8708
8709 GenPackedSwitchWithCompares(value_reg,
8710 lower_bound,
8711 num_entries,
8712 switch_instr->GetBlock(),
8713 switch_instr->GetDefaultBlock());
8714}
8715
Mark Mendell805b3b52015-09-18 14:10:29 -04008716void LocationsBuilderX86::VisitX86PackedSwitch(HX86PackedSwitch* switch_instr) {
8717 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01008718 new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
Mark Mendell805b3b52015-09-18 14:10:29 -04008719 locations->SetInAt(0, Location::RequiresRegister());
8720
8721 // Constant area pointer.
8722 locations->SetInAt(1, Location::RequiresRegister());
8723
8724 // And the temporary we need.
8725 locations->AddTemp(Location::RequiresRegister());
8726}
8727
// Emits code for a jump-table based packed switch. Small switches
// (<= kPackedSwitchJumpTableThreshold entries) fall back to the
// compare/branch cascade. Otherwise: bias the value to zero, bounds-check it
// with a single unsigned compare, then load the target's offset from the
// jump table in the constant area and jump through it.
void InstructionCodeGeneratorX86::VisitX86PackedSwitch(HX86PackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  Register value_reg = locations->InAt(0).AsRegister<Register>();
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  if (num_entries <= kPackedSwitchJumpTableThreshold) {
    GenPackedSwitchWithCompares(value_reg,
                                lower_bound,
                                num_entries,
                                switch_instr->GetBlock(),
                                default_block);
    return;
  }

  // Optimizing has a jump area.
  Register temp_reg = locations->GetTemp(0).AsRegister<Register>();
  Register constant_area = locations->InAt(1).AsRegister<Register>();

  // Remove the bias, if needed, so the table index starts at zero.
  if (lower_bound != 0) {
    __ leal(temp_reg, Address(value_reg, -lower_bound));
    value_reg = temp_reg;
  }

  // Is the value in range? The unsigned "above" compare also sends values
  // that went negative after bias removal to the default block.
  DCHECK_GE(num_entries, 1u);
  __ cmpl(value_reg, Immediate(num_entries - 1));
  __ j(kAbove, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load (target-constant_area) from the jump table, indexing by the value.
  __ movl(temp_reg, codegen_->LiteralCaseTable(switch_instr, constant_area, value_reg));

  // Compute the actual target address by adding in constant_area.
  __ addl(temp_reg, constant_area);

  // And jump.
  __ jmp(temp_reg);
}
8769
Mark Mendell0616ae02015-04-17 12:49:27 -04008770void LocationsBuilderX86::VisitX86ComputeBaseMethodAddress(
8771 HX86ComputeBaseMethodAddress* insn) {
8772 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01008773 new (GetGraph()->GetAllocator()) LocationSummary(insn, LocationSummary::kNoCall);
Mark Mendell0616ae02015-04-17 12:49:27 -04008774 locations->SetOut(Location::RequiresRegister());
8775}
8776
8777void InstructionCodeGeneratorX86::VisitX86ComputeBaseMethodAddress(
8778 HX86ComputeBaseMethodAddress* insn) {
8779 LocationSummary* locations = insn->GetLocations();
8780 Register reg = locations->Out().AsRegister<Register>();
8781
8782 // Generate call to next instruction.
8783 Label next_instruction;
8784 __ call(&next_instruction);
8785 __ Bind(&next_instruction);
8786
8787 // Remember this offset for later use with constant area.
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008788 codegen_->AddMethodAddressOffset(insn, GetAssembler()->CodeSize());
Mark Mendell0616ae02015-04-17 12:49:27 -04008789
8790 // Grab the return address off the stack.
8791 __ popl(reg);
8792}
8793
8794void LocationsBuilderX86::VisitX86LoadFromConstantTable(
8795 HX86LoadFromConstantTable* insn) {
8796 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01008797 new (GetGraph()->GetAllocator()) LocationSummary(insn, LocationSummary::kNoCall);
Mark Mendell0616ae02015-04-17 12:49:27 -04008798
8799 locations->SetInAt(0, Location::RequiresRegister());
8800 locations->SetInAt(1, Location::ConstantLocation(insn->GetConstant()));
8801
8802 // If we don't need to be materialized, we only need the inputs to be set.
David Brazdilb3e773e2016-01-26 11:28:37 +00008803 if (insn->IsEmittedAtUseSite()) {
Mark Mendell0616ae02015-04-17 12:49:27 -04008804 return;
8805 }
8806
8807 switch (insn->GetType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008808 case DataType::Type::kFloat32:
8809 case DataType::Type::kFloat64:
Mark Mendell0616ae02015-04-17 12:49:27 -04008810 locations->SetOut(Location::RequiresFpuRegister());
8811 break;
8812
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008813 case DataType::Type::kInt32:
Mark Mendell0616ae02015-04-17 12:49:27 -04008814 locations->SetOut(Location::RequiresRegister());
8815 break;
8816
8817 default:
8818 LOG(FATAL) << "Unsupported x86 constant area type " << insn->GetType();
8819 }
8820}
8821
8822void InstructionCodeGeneratorX86::VisitX86LoadFromConstantTable(HX86LoadFromConstantTable* insn) {
David Brazdilb3e773e2016-01-26 11:28:37 +00008823 if (insn->IsEmittedAtUseSite()) {
Mark Mendell0616ae02015-04-17 12:49:27 -04008824 return;
8825 }
8826
8827 LocationSummary* locations = insn->GetLocations();
8828 Location out = locations->Out();
8829 Register const_area = locations->InAt(0).AsRegister<Register>();
8830 HConstant *value = insn->GetConstant();
8831
8832 switch (insn->GetType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008833 case DataType::Type::kFloat32:
Mark Mendell0616ae02015-04-17 12:49:27 -04008834 __ movss(out.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008835 codegen_->LiteralFloatAddress(
Vladimir Markocde64972023-04-25 16:40:06 +00008836 value->AsFloatConstant()->GetValue(), insn->GetBaseMethodAddress(), const_area));
Mark Mendell0616ae02015-04-17 12:49:27 -04008837 break;
8838
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008839 case DataType::Type::kFloat64:
Mark Mendell0616ae02015-04-17 12:49:27 -04008840 __ movsd(out.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008841 codegen_->LiteralDoubleAddress(
Vladimir Markocde64972023-04-25 16:40:06 +00008842 value->AsDoubleConstant()->GetValue(),
Vladimir Marko5150dbe2023-04-26 09:13:59 +00008843 insn->GetBaseMethodAddress(),
8844 const_area));
Mark Mendell0616ae02015-04-17 12:49:27 -04008845 break;
8846
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008847 case DataType::Type::kInt32:
Mark Mendell0616ae02015-04-17 12:49:27 -04008848 __ movl(out.AsRegister<Register>(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008849 codegen_->LiteralInt32Address(
Vladimir Markocde64972023-04-25 16:40:06 +00008850 value->AsIntConstant()->GetValue(), insn->GetBaseMethodAddress(), const_area));
Mark Mendell0616ae02015-04-17 12:49:27 -04008851 break;
8852
8853 default:
8854 LOG(FATAL) << "Unsupported x86 constant area type " << insn->GetType();
8855 }
8856}
8857
/**
 * Class to handle late fixup of offsets into constant area.
 */
class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
 public:
  // `offset` is the position of the referenced constant inside the constant
  // area. The final displacement is only computed in Process(), once the
  // constant area start is known.
  RIPFixup(CodeGeneratorX86& codegen,
           HX86ComputeBaseMethodAddress* base_method_address,
           size_t offset)
      : codegen_(&codegen),
        base_method_address_(base_method_address),
        offset_into_constant_area_(offset) {}

 protected:
  // Lets subclasses (e.g. JumpTableRIPFixup) set the offset late, after the
  // constant area layout has been decided.
  void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }

  CodeGeneratorX86* codegen_;
  // Instruction that materialized the method address; the patched displacement
  // is relative to the offset recorded for this instruction.
  HX86ComputeBaseMethodAddress* base_method_address_;

 private:
  void Process(const MemoryRegion& region, int pos) override {
    // Patch the correct offset for the instruction. The place to patch is the
    // last 4 bytes of the instruction.
    // The value to patch is the distance from the offset in the constant area
    // from the address computed by the HX86ComputeBaseMethodAddress instruction.
    int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
    int32_t relative_position =
        constant_offset - codegen_->GetMethodAddressOffset(base_method_address_);

    // Patch in the right value.
    region.StoreUnaligned<int32_t>(pos - 4, relative_position);
  }

  // Location in constant area that the fixup refers to.
  int32_t offset_into_constant_area_;
};
8893
/**
 * Class to handle late fixup of offsets to a jump table that will be created in the
 * constant area.
 */
class JumpTableRIPFixup : public RIPFixup {
 public:
  // The base-class offset starts as a placeholder (size_t(-1)); the real
  // offset is installed by CreateJumpTable() once the table is emitted.
  JumpTableRIPFixup(CodeGeneratorX86& codegen, HX86PackedSwitch* switch_instr)
      : RIPFixup(codegen, switch_instr->GetBaseMethodAddress(), static_cast<size_t>(-1)),
        switch_instr_(switch_instr) {}

  // Appends the jump table to the assembler's constant area and updates this
  // fixup's offset to point at it. Invoked from CodeGeneratorX86::Finalize(),
  // when all block labels are bound (see the DCHECK below).
  void CreateJumpTable() {
    X86Assembler* assembler = codegen_->GetAssembler();

    // Ensure that the reference to the jump table has the correct offset.
    const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
    SetOffset(offset_in_constant_table);

    // The label values in the jump table are computed relative to the
    // instruction addressing the constant area.
    const int32_t relative_offset = codegen_->GetMethodAddressOffset(base_method_address_);

    // Populate the jump table with the correct values for the jump table.
    int32_t num_entries = switch_instr_->GetNumEntries();
    HBasicBlock* block = switch_instr_->GetBlock();
    const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
    // The value that we want is the target offset - the position of the table.
    for (int32_t i = 0; i < num_entries; i++) {
      HBasicBlock* b = successors[i];
      Label* l = codegen_->GetLabelOf(b);
      DCHECK(l->IsBound());
      int32_t offset_to_block = l->Position() - relative_offset;
      assembler->AppendInt32(offset_to_block);
    }
  }

 private:
  // The packed-switch instruction this jump table belongs to.
  const HX86PackedSwitch* switch_instr_;
};
8932
8933void CodeGeneratorX86::Finalize(CodeAllocator* allocator) {
8934 // Generate the constant area if needed.
8935 X86Assembler* assembler = GetAssembler();
jaishank20d1c942019-03-08 15:08:17 +05308936
Mark Mendell805b3b52015-09-18 14:10:29 -04008937 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
8938 // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8
8939 // byte values.
8940 assembler->Align(4, 0);
8941 constant_area_start_ = assembler->CodeSize();
8942
8943 // Populate any jump tables.
Vladimir Marko7d157fc2017-05-10 16:29:23 +01008944 for (JumpTableRIPFixup* jump_table : fixups_to_jump_tables_) {
Mark Mendell805b3b52015-09-18 14:10:29 -04008945 jump_table->CreateJumpTable();
8946 }
8947
8948 // And now add the constant area to the generated code.
8949 assembler->AddConstantArea();
8950 }
8951
8952 // And finish up.
8953 CodeGenerator::Finalize(allocator);
8954}
8955
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008956Address CodeGeneratorX86::LiteralDoubleAddress(double v,
8957 HX86ComputeBaseMethodAddress* method_base,
8958 Register reg) {
8959 AssemblerFixup* fixup =
Vladimir Markoca6fff82017-10-03 14:49:14 +01008960 new (GetGraph()->GetAllocator()) RIPFixup(*this, method_base, __ AddDouble(v));
Vladimir Marko4ef451a2020-07-23 09:54:27 +00008961 return Address(reg, kPlaceholder32BitOffset, fixup);
Mark Mendell0616ae02015-04-17 12:49:27 -04008962}
8963
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008964Address CodeGeneratorX86::LiteralFloatAddress(float v,
8965 HX86ComputeBaseMethodAddress* method_base,
8966 Register reg) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01008967 AssemblerFixup* fixup =
8968 new (GetGraph()->GetAllocator()) RIPFixup(*this, method_base, __ AddFloat(v));
Vladimir Marko4ef451a2020-07-23 09:54:27 +00008969 return Address(reg, kPlaceholder32BitOffset, fixup);
Mark Mendell0616ae02015-04-17 12:49:27 -04008970}
8971
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008972Address CodeGeneratorX86::LiteralInt32Address(int32_t v,
8973 HX86ComputeBaseMethodAddress* method_base,
8974 Register reg) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01008975 AssemblerFixup* fixup =
8976 new (GetGraph()->GetAllocator()) RIPFixup(*this, method_base, __ AddInt32(v));
Vladimir Marko4ef451a2020-07-23 09:54:27 +00008977 return Address(reg, kPlaceholder32BitOffset, fixup);
Mark Mendell0616ae02015-04-17 12:49:27 -04008978}
8979
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008980Address CodeGeneratorX86::LiteralInt64Address(int64_t v,
8981 HX86ComputeBaseMethodAddress* method_base,
8982 Register reg) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01008983 AssemblerFixup* fixup =
8984 new (GetGraph()->GetAllocator()) RIPFixup(*this, method_base, __ AddInt64(v));
Vladimir Marko4ef451a2020-07-23 09:54:27 +00008985 return Address(reg, kPlaceholder32BitOffset, fixup);
Mark Mendell0616ae02015-04-17 12:49:27 -04008986}
8987
Aart Bika19616e2016-02-01 18:57:58 -08008988void CodeGeneratorX86::Load32BitValue(Register dest, int32_t value) {
8989 if (value == 0) {
8990 __ xorl(dest, dest);
8991 } else {
8992 __ movl(dest, Immediate(value));
8993 }
8994}
8995
8996void CodeGeneratorX86::Compare32BitValue(Register dest, int32_t value) {
8997 if (value == 0) {
8998 __ testl(dest, dest);
8999 } else {
9000 __ cmpl(dest, Immediate(value));
9001 }
9002}
9003
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01009004void CodeGeneratorX86::GenerateIntCompare(Location lhs, Location rhs) {
9005 Register lhs_reg = lhs.AsRegister<Register>();
jessicahandojo4877b792016-09-08 19:49:13 -07009006 GenerateIntCompare(lhs_reg, rhs);
9007}
9008
9009void CodeGeneratorX86::GenerateIntCompare(Register lhs, Location rhs) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01009010 if (rhs.IsConstant()) {
9011 int32_t value = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
jessicahandojo4877b792016-09-08 19:49:13 -07009012 Compare32BitValue(lhs, value);
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01009013 } else if (rhs.IsStackSlot()) {
jessicahandojo4877b792016-09-08 19:49:13 -07009014 __ cmpl(lhs, Address(ESP, rhs.GetStackIndex()));
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01009015 } else {
jessicahandojo4877b792016-09-08 19:49:13 -07009016 __ cmpl(lhs, rhs.AsRegister<Register>());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01009017 }
9018}
9019
9020Address CodeGeneratorX86::ArrayAddress(Register obj,
9021 Location index,
9022 ScaleFactor scale,
9023 uint32_t data_offset) {
Vladimir Markof2eef5f2023-04-06 10:29:19 +00009024 return index.IsConstant()
Vladimir Markocde64972023-04-25 16:40:06 +00009025 ? Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << scale) + data_offset)
Vladimir Markof2eef5f2023-04-06 10:29:19 +00009026 : Address(obj, index.AsRegister<Register>(), scale, data_offset);
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01009027}
9028
Mark Mendell805b3b52015-09-18 14:10:29 -04009029Address CodeGeneratorX86::LiteralCaseTable(HX86PackedSwitch* switch_instr,
9030 Register reg,
9031 Register value) {
9032 // Create a fixup to be used to create and address the jump table.
9033 JumpTableRIPFixup* table_fixup =
Vladimir Markoca6fff82017-10-03 14:49:14 +01009034 new (GetGraph()->GetAllocator()) JumpTableRIPFixup(*this, switch_instr);
Mark Mendell805b3b52015-09-18 14:10:29 -04009035
9036 // We have to populate the jump tables.
9037 fixups_to_jump_tables_.push_back(table_fixup);
9038
9039 // We want a scaled address, as we are extracting the correct offset from the table.
Vladimir Marko4ef451a2020-07-23 09:54:27 +00009040 return Address(reg, value, TIMES_4, kPlaceholder32BitOffset, table_fixup);
Mark Mendell805b3b52015-09-18 14:10:29 -04009041}
9042
Andreas Gampe85b62f22015-09-09 13:15:38 -07009043// TODO: target as memory.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01009044void CodeGeneratorX86::MoveFromReturnRegister(Location target, DataType::Type type) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07009045 if (!target.IsValid()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01009046 DCHECK_EQ(type, DataType::Type::kVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07009047 return;
9048 }
9049
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01009050 DCHECK_NE(type, DataType::Type::kVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07009051
9052 Location return_loc = InvokeDexCallingConventionVisitorX86().GetReturnLocation(type);
9053 if (target.Equals(return_loc)) {
9054 return;
9055 }
9056
9057 // TODO: Consider pairs in the parallel move resolver, then this could be nicely merged
9058 // with the else branch.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01009059 if (type == DataType::Type::kInt64) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01009060 HParallelMove parallel_move(GetGraph()->GetAllocator());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01009061 parallel_move.AddMove(return_loc.ToLow(), target.ToLow(), DataType::Type::kInt32, nullptr);
9062 parallel_move.AddMove(return_loc.ToHigh(), target.ToHigh(), DataType::Type::kInt32, nullptr);
Andreas Gampe85b62f22015-09-09 13:15:38 -07009063 GetMoveResolver()->EmitNativeCode(&parallel_move);
9064 } else {
9065 // Let the parallel move resolver take care of all of this.
Vladimir Markoca6fff82017-10-03 14:49:14 +01009066 HParallelMove parallel_move(GetGraph()->GetAllocator());
Andreas Gampe85b62f22015-09-09 13:15:38 -07009067 parallel_move.AddMove(return_loc, target, type, nullptr);
9068 GetMoveResolver()->EmitNativeCode(&parallel_move);
9069 }
9070}
9071
// Patches a 32-bit literal in JIT-generated `code` with the address of entry
// `index_in_table` in the JIT GC-roots table located at `roots_data`.
void CodeGeneratorX86::PatchJitRootUse(uint8_t* code,
                                       const uint8_t* roots_data,
                                       const PatchInfo<Label>& info,
                                       uint64_t index_in_table) const {
  // Adjust from the bound label position to the literal's location in the
  // instruction (see kLabelPositionToLiteralOffsetAdjustment).
  uint32_t code_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
  // Address of the selected GcRoot slot inside the roots table.
  uintptr_t address =
      reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
  // The patch site inside the code is not guaranteed to be 4-byte aligned,
  // so write through an alignment-1 view of uint32_t.
  using unaligned_uint32_t __attribute__((__aligned__(1))) = uint32_t;
  reinterpret_cast<unaligned_uint32_t*>(code + code_offset)[0] =
      dchecked_integral_cast<uint32_t>(address);
}
9083
Nicolas Geoffray132d8362016-11-16 09:19:42 +00009084void CodeGeneratorX86::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
9085 for (const PatchInfo<Label>& info : jit_string_patches_) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00009086 StringReference string_reference(info.target_dex_file, dex::StringIndex(info.offset_or_index));
Vladimir Marko174b2e22017-10-12 13:34:49 +01009087 uint64_t index_in_table = GetJitStringRootIndex(string_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01009088 PatchJitRootUse(code, roots_data, info, index_in_table);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00009089 }
9090
9091 for (const PatchInfo<Label>& info : jit_class_patches_) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00009092 TypeReference type_reference(info.target_dex_file, dex::TypeIndex(info.offset_or_index));
Vladimir Marko174b2e22017-10-12 13:34:49 +01009093 uint64_t index_in_table = GetJitClassRootIndex(type_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01009094 PatchJitRootUse(code, roots_data, info, index_in_table);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00009095 }
9096}
9097
// HIntermediateAddress is not expected to reach the x86 backend; hitting this
// visitor indicates a compiler bug.
void LocationsBuilderX86::VisitIntermediateAddress(
    [[maybe_unused]] HIntermediateAddress* instruction) {
  LOG(FATAL) << "Unreachable";
}
9102
// HIntermediateAddress is not expected to reach the x86 backend; hitting this
// visitor indicates a compiler bug.
void InstructionCodeGeneratorX86::VisitIntermediateAddress(
    [[maybe_unused]] HIntermediateAddress* instruction) {
  LOG(FATAL) << "Unreachable";
}
9107
// Returns whether the target instruction set features include AVX.
bool LocationsBuilderX86::CpuHasAvxFeatureFlag() {
  return codegen_->GetInstructionSetFeatures().HasAVX();
}
// Returns whether the target instruction set features include AVX2.
bool LocationsBuilderX86::CpuHasAvx2FeatureFlag() {
  return codegen_->GetInstructionSetFeatures().HasAVX2();
}
// Returns whether the target instruction set features include AVX.
bool InstructionCodeGeneratorX86::CpuHasAvxFeatureFlag() {
  return codegen_->GetInstructionSetFeatures().HasAVX();
}
// Returns whether the target instruction set features include AVX2.
bool InstructionCodeGeneratorX86::CpuHasAvx2FeatureFlag() {
  return codegen_->GetInstructionSetFeatures().HasAVX2();
}
9120
Roland Levillain4d027112015-07-01 15:41:14 +01009121#undef __
9122
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00009123} // namespace x86
9124} // namespace art