/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "calling_convention_x86.h"

#include <android-base/logging.h>

#include "arch/instruction_set.h"
#include "arch/x86/jni_frame_x86.h"
#include "utils/x86/managed_register_x86.h"

namespace art {
namespace x86 {

static constexpr ManagedRegister kManagedCoreArgumentRegisters[] = {
    X86ManagedRegister::FromCpuRegister(EAX),
    X86ManagedRegister::FromCpuRegister(ECX),
    X86ManagedRegister::FromCpuRegister(EDX),
    X86ManagedRegister::FromCpuRegister(EBX),
};
static constexpr size_t kManagedCoreArgumentRegistersCount =
    arraysize(kManagedCoreArgumentRegisters);
static constexpr size_t kManagedFpArgumentRegistersCount = 4u;

static constexpr ManagedRegister kCalleeSaveRegisters[] = {
    // Core registers.
    X86ManagedRegister::FromCpuRegister(EBP),
    X86ManagedRegister::FromCpuRegister(ESI),
    X86ManagedRegister::FromCpuRegister(EDI),
    // No hard float callee saves.
};

template <size_t size>
static constexpr uint32_t CalculateCoreCalleeSpillMask(
    const ManagedRegister (&callee_saves)[size]) {
  // The spilled PC gets a special marker.
  uint32_t result = 1 << kNumberOfCpuRegisters;
  for (auto&& r : callee_saves) {
    if (r.AsX86().IsCpuRegister()) {
      result |= (1 << r.AsX86().AsCpuRegister());
    }
  }
  return result;
}

static constexpr uint32_t kCoreCalleeSpillMask = CalculateCoreCalleeSpillMask(kCalleeSaveRegisters);
static constexpr uint32_t kFpCalleeSpillMask = 0u;

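// Worked example (a sketch, assuming the standard ART x86 register encodings
// EAX=0 ... EDI=7 and kNumberOfCpuRegisters == 8): spilling EBP (5), ESI (6)
// and EDI (7) plus the PC marker bit gives
//   kCoreCalleeSpillMask == (1u << 8) | (1u << 7) | (1u << 6) | (1u << 5) == 0x1E0u.
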
static constexpr ManagedRegister kNativeCalleeSaveRegisters[] = {
    // Core registers.
    X86ManagedRegister::FromCpuRegister(EBX),
    X86ManagedRegister::FromCpuRegister(EBP),
    X86ManagedRegister::FromCpuRegister(ESI),
    X86ManagedRegister::FromCpuRegister(EDI),
    // No hard float callee saves.
};

static constexpr uint32_t kNativeCoreCalleeSpillMask =
    CalculateCoreCalleeSpillMask(kNativeCalleeSaveRegisters);
static constexpr uint32_t kNativeFpCalleeSpillMask = 0u;

// Calling convention

ArrayRef<const ManagedRegister> X86JniCallingConvention::CalleeSaveScratchRegisters() const {
  DCHECK(!IsCriticalNative());
  // All managed callee-save registers are available.
  static_assert((kCoreCalleeSpillMask & ~kNativeCoreCalleeSpillMask) == 0u);
  static_assert(kFpCalleeSpillMask == 0u);
  return ArrayRef<const ManagedRegister>(kCalleeSaveRegisters);
}

ArrayRef<const ManagedRegister> X86JniCallingConvention::ArgumentScratchRegisters() const {
  DCHECK(!IsCriticalNative());
  // Exclude EAX or EAX/EDX if they are used as return registers.
  // Due to the odd ordering of argument registers, use a re-ordered array (pull EDX forward).
  static constexpr ManagedRegister kArgumentRegisters[] = {
      X86ManagedRegister::FromCpuRegister(EAX),
      X86ManagedRegister::FromCpuRegister(EDX),
      X86ManagedRegister::FromCpuRegister(ECX),
      X86ManagedRegister::FromCpuRegister(EBX),
  };
  static_assert(arraysize(kArgumentRegisters) == kManagedCoreArgumentRegistersCount);
  static_assert(kManagedCoreArgumentRegisters[0].Equals(kArgumentRegisters[0]));
  static_assert(kManagedCoreArgumentRegisters[1].Equals(kArgumentRegisters[2]));
  static_assert(kManagedCoreArgumentRegisters[2].Equals(kArgumentRegisters[1]));
  static_assert(kManagedCoreArgumentRegisters[3].Equals(kArgumentRegisters[3]));
  ArrayRef<const ManagedRegister> scratch_regs(kArgumentRegisters);
  X86ManagedRegister return_reg = ReturnRegister().AsX86();
  auto return_reg_overlaps = [return_reg](ManagedRegister reg) {
    return return_reg.Overlaps(reg.AsX86());
  };
  if (return_reg_overlaps(scratch_regs[0])) {
    scratch_regs = scratch_regs.SubArray(/*pos=*/ return_reg_overlaps(scratch_regs[1]) ? 2u : 1u);
  }
  DCHECK(std::none_of(scratch_regs.begin(), scratch_regs.end(), return_reg_overlaps));
  return scratch_regs;
}
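
// For example (a sketch following the logic above): a 'J' return occupies the
// EAX:EDX pair, which overlaps the first two entries of the re-ordered array,
// leaving {ECX, EBX} as scratch; an 'I' return excludes only EAX, leaving
// {EDX, ECX, EBX}.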

static ManagedRegister ReturnRegisterForShorty(const char* shorty, bool jni) {
  if (shorty[0] == 'F' || shorty[0] == 'D') {
    if (jni) {
      return X86ManagedRegister::FromX87Register(ST0);
    } else {
      return X86ManagedRegister::FromXmmRegister(XMM0);
    }
  } else if (shorty[0] == 'J') {
    return X86ManagedRegister::FromRegisterPair(EAX_EDX);
  } else if (shorty[0] == 'V') {
    return ManagedRegister::NoRegister();
  } else {
    return X86ManagedRegister::FromCpuRegister(EAX);
  }
}
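
// Example (a sketch): for a method with shorty "D..." (double return), managed
// code receives the result in XMM0 while a JNI stub receives it in ST0, since
// 32-bit native code follows cdecl and returns floating point on the x87 stack.
// A 'J' return uses the EAX:EDX pair in both conventions.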

ManagedRegister X86ManagedRuntimeCallingConvention::ReturnRegister() const {
  return ReturnRegisterForShorty(GetShorty(), false);
}

ManagedRegister X86JniCallingConvention::ReturnRegister() const {
  return ReturnRegisterForShorty(GetShorty(), true);
}

ManagedRegister X86JniCallingConvention::IntReturnRegister() const {
  return X86ManagedRegister::FromCpuRegister(EAX);
}

// Managed runtime calling convention

ManagedRegister X86ManagedRuntimeCallingConvention::MethodRegister() {
  return X86ManagedRegister::FromCpuRegister(EAX);
}

void X86ManagedRuntimeCallingConvention::ResetIterator(FrameOffset displacement) {
  ManagedRuntimeCallingConvention::ResetIterator(displacement);
  gpr_arg_count_ = 1u;  // Skip EAX for the ArtMethod*.
}

void X86ManagedRuntimeCallingConvention::Next() {
  if (!IsCurrentParamAFloatOrDouble()) {
    gpr_arg_count_ += IsCurrentParamALong() ? 2u : 1u;
  }
  ManagedRuntimeCallingConvention::Next();
}
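
// Register assignment walk-through (a sketch): for an instance method with
// shorty "VJI", gpr_arg_count_ starts at 1, so 'this' lands in ECX, the long
// takes the EDX:EBX pair (gpr_arg_count_ 2 and 3), and the trailing int goes
// to the stack.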

bool X86ManagedRuntimeCallingConvention::IsCurrentParamInRegister() {
  if (IsCurrentParamAFloatOrDouble()) {
    return itr_float_and_doubles_ < kManagedFpArgumentRegistersCount;
  } else {
    // Don't split a long between the last register and the stack.
    size_t extra_regs = IsCurrentParamALong() ? 1u : 0u;
    return gpr_arg_count_ + extra_regs < kManagedCoreArgumentRegistersCount;
  }
}

bool X86ManagedRuntimeCallingConvention::IsCurrentParamOnStack() {
  return !IsCurrentParamInRegister();
}

ManagedRegister X86ManagedRuntimeCallingConvention::CurrentParamRegister() {
  DCHECK(IsCurrentParamInRegister());
  if (IsCurrentParamAFloatOrDouble()) {
    // The first four FP parameters are passed in XMM0..XMM3.
    XmmRegister reg = static_cast<XmmRegister>(XMM0 + itr_float_and_doubles_);
    return X86ManagedRegister::FromXmmRegister(reg);
  } else {
    if (IsCurrentParamALong()) {
      switch (gpr_arg_count_) {
        case 1:
          static_assert(kManagedCoreArgumentRegisters[1].AsX86().AsCpuRegister() == ECX);
          static_assert(kManagedCoreArgumentRegisters[2].AsX86().AsCpuRegister() == EDX);
          return X86ManagedRegister::FromRegisterPair(ECX_EDX);
        case 2:
          static_assert(kManagedCoreArgumentRegisters[2].AsX86().AsCpuRegister() == EDX);
          static_assert(kManagedCoreArgumentRegisters[3].AsX86().AsCpuRegister() == EBX);
          return X86ManagedRegister::FromRegisterPair(EDX_EBX);
        default:
          LOG(FATAL) << "UNREACHABLE";
          UNREACHABLE();
      }
    } else {
      return kManagedCoreArgumentRegisters[gpr_arg_count_];
    }
  }
}

FrameOffset X86ManagedRuntimeCallingConvention::CurrentParamStackOffset() {
  return FrameOffset(displacement_.Int32Value() +        // displacement
                     kFramePointerSize +                 // Method*
                     (itr_slots_ * kFramePointerSize));  // offset into in args
}
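
// E.g. (a sketch, with kFramePointerSize == 4 on x86): the first in-arg slot
// (itr_slots_ == 0) sits at displacement_ + 4, immediately above the
// ArtMethod* slot; each further slot adds 4 bytes.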

// JNI calling convention

X86JniCallingConvention::X86JniCallingConvention(bool is_static,
                                                 bool is_synchronized,
                                                 bool is_fast_native,
                                                 bool is_critical_native,
                                                 const char* shorty)
    : JniCallingConvention(is_static,
                           is_synchronized,
                           is_fast_native,
                           is_critical_native,
                           shorty,
                           kX86PointerSize) {
}

uint32_t X86JniCallingConvention::CoreSpillMask() const {
  return is_critical_native_ ? 0u : kCoreCalleeSpillMask;
}

uint32_t X86JniCallingConvention::FpSpillMask() const {
  return is_critical_native_ ? 0u : kFpCalleeSpillMask;
}

size_t X86JniCallingConvention::FrameSize() const {
  if (is_critical_native_) {
    CHECK(!SpillsMethod());
    CHECK(!HasLocalReferenceSegmentState());
    return 0u;  // There is no managed frame for @CriticalNative.
  }

  // Method*, PC return address and callee-save area size.
  DCHECK(SpillsMethod());
  const size_t method_ptr_size = static_cast<size_t>(kX86PointerSize);
  const size_t pc_return_addr_size = kFramePointerSize;
  const size_t callee_save_area_size = CalleeSaveRegisters().size() * kFramePointerSize;
  size_t total_size = method_ptr_size + pc_return_addr_size + callee_save_area_size;

  DCHECK(HasLocalReferenceSegmentState());
  // The local reference segment state (cookie) is saved in one of the spilled registers.

  return RoundUp(total_size, kStackAlignment);
}
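
// Worked example (a sketch, assuming kFramePointerSize == 4 and
// kStackAlignment == 16): 4 (Method*) + 4 (return PC) + 3 * 4 (EBP, ESI, EDI)
// = 20 bytes, rounded up to a 32-byte managed frame.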

size_t X86JniCallingConvention::OutFrameSize() const {
  // The size of outgoing arguments.
  size_t size = GetNativeOutArgsSize(/*num_args=*/ NumberOfExtraArgumentsForJni() + NumArgs(),
                                     NumLongOrDoubleArgs());

  // @CriticalNative can use a tail call as all managed callee saves are also
  // preserved by the native calling convention.
  static_assert((kCoreCalleeSpillMask & ~kNativeCoreCalleeSpillMask) == 0u);
  static_assert((kFpCalleeSpillMask & ~kNativeFpCalleeSpillMask) == 0u);

  if (UNLIKELY(IsCriticalNative())) {
    // Add the return address size for @CriticalNative.
    // For normal native methods the return PC is part of the managed stack frame
    // instead of the out args.
    size += kFramePointerSize;
    // For @CriticalNative, we can make a tail call if there are no stack args,
    // the return type is not an FP type (which would need moving from ST0 to XMM0)
    // and we do not need to extend the result.
    bool return_type_ok = GetShorty()[0] == 'I' || GetShorty()[0] == 'J' || GetShorty()[0] == 'V';
    DCHECK_EQ(
        return_type_ok,
        GetShorty()[0] != 'F' && GetShorty()[0] != 'D' && !RequiresSmallResultTypeExtension());
    if (return_type_ok && size == kFramePointerSize) {
      // Note: This is not aligned to kNativeStackAlignment but that's OK for a tail call.
      static_assert(kFramePointerSize < kNativeStackAlignment);
      // The stub frame size is considered 0 in the callee, where the return PC is a part of
      // the callee frame, but it is kPointerSize in the compiled stub before the tail call.
      DCHECK_EQ(0u, GetCriticalNativeStubFrameSize(GetShorty(), NumArgs() + 1u));
      return kFramePointerSize;
    }
  }

  size_t out_args_size = RoundUp(size, kNativeStackAlignment);
  if (UNLIKELY(IsCriticalNative())) {
    DCHECK_EQ(out_args_size, GetCriticalNativeStubFrameSize(GetShorty(), NumArgs() + 1u));
  }
  return out_args_size;
}
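
// Tail-call example (a sketch, assuming kFramePointerSize == 4 and that
// GetNativeOutArgsSize(0, 0) == 0): a no-arg @CriticalNative method returning
// 'I' needs only the 4-byte return PC, so OutFrameSize() == kFramePointerSize
// and UseTailCall() below returns true.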

ArrayRef<const ManagedRegister> X86JniCallingConvention::CalleeSaveRegisters() const {
  if (UNLIKELY(IsCriticalNative())) {
    // Do not spill anything, whether tail call or not (the return PC is already on the stack).
    return ArrayRef<const ManagedRegister>();
  } else {
    return ArrayRef<const ManagedRegister>(kCalleeSaveRegisters);
  }
}

bool X86JniCallingConvention::IsCurrentParamInRegister() {
  return false;  // Everything is passed on the stack.
}

bool X86JniCallingConvention::IsCurrentParamOnStack() {
  return true;  // Everything is passed on the stack.
}

ManagedRegister X86JniCallingConvention::CurrentParamRegister() {
  LOG(FATAL) << "Should not reach here";
  UNREACHABLE();
}

FrameOffset X86JniCallingConvention::CurrentParamStackOffset() {
  return FrameOffset(
      displacement_.Int32Value() - OutFrameSize() + (itr_slots_ * kFramePointerSize));
}
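
// In other words (a sketch): the native out-args area occupies OutFrameSize()
// bytes directly below the managed frame at displacement_, and slot i of the
// native call lands at displacement_ - OutFrameSize() + 4 * i.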

ManagedRegister X86JniCallingConvention::LockingArgumentRegister() const {
  DCHECK(!IsFastNative());
  DCHECK(!IsCriticalNative());
  DCHECK(IsSynchronized());
  // The callee-save register EBP is suitable as a locking argument.
  static_assert(kCalleeSaveRegisters[0].Equals(X86ManagedRegister::FromCpuRegister(EBP)));
  return X86ManagedRegister::FromCpuRegister(EBP);
}

ManagedRegister X86JniCallingConvention::HiddenArgumentRegister() const {
  CHECK(IsCriticalNative());
  // EAX is neither managed callee-save, nor argument register, nor scratch register.
  DCHECK(std::none_of(kCalleeSaveRegisters,
                      kCalleeSaveRegisters + std::size(kCalleeSaveRegisters),
                      [](ManagedRegister callee_save) constexpr {
                        return callee_save.Equals(X86ManagedRegister::FromCpuRegister(EAX));
                      }));
  return X86ManagedRegister::FromCpuRegister(EAX);
}

bool X86JniCallingConvention::UseTailCall() const {
  CHECK(IsCriticalNative());
  return OutFrameSize() == kFramePointerSize;
}

}  // namespace x86
}  // namespace art