/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
Ian Rogers | b033c75 | 2011-07-20 12:22:35 -0700 | [diff] [blame] | 16 | |
Ian Rogers | 2c8f653 | 2011-09-02 17:16:34 -0700 | [diff] [blame] | 17 | #include "calling_convention_x86.h" |
Ian Rogers | 57b86d4 | 2012-03-27 16:05:41 -0700 | [diff] [blame] | 18 | |
Andreas Gampe | 5794381 | 2017-12-06 21:39:13 -0800 | [diff] [blame] | 19 | #include <android-base/logging.h> |
| 20 | |
Andreas Gampe | 639b2b1 | 2019-01-08 10:32:50 -0800 | [diff] [blame] | 21 | #include "arch/instruction_set.h" |
Vladimir Marko | fa458ac | 2020-02-12 14:08:07 +0000 | [diff] [blame] | 22 | #include "arch/x86/jni_frame_x86.h" |
Ian Rogers | 166db04 | 2013-07-26 12:05:57 -0700 | [diff] [blame] | 23 | #include "utils/x86/managed_register_x86.h" |
Ian Rogers | b033c75 | 2011-07-20 12:22:35 -0700 | [diff] [blame] | 24 | |
| 25 | namespace art { |
Ian Rogers | 2c8f653 | 2011-09-02 17:16:34 -0700 | [diff] [blame] | 26 | namespace x86 { |
Ian Rogers | b033c75 | 2011-07-20 12:22:35 -0700 | [diff] [blame] | 27 | |
Vladimir Marko | e74e0ce | 2021-12-08 14:16:21 +0000 | [diff] [blame] | 28 | static constexpr ManagedRegister kManagedCoreArgumentRegisters[] = { |
| 29 | X86ManagedRegister::FromCpuRegister(EAX), |
| 30 | X86ManagedRegister::FromCpuRegister(ECX), |
| 31 | X86ManagedRegister::FromCpuRegister(EDX), |
| 32 | X86ManagedRegister::FromCpuRegister(EBX), |
Vladimir Marko | 662f12e | 2020-02-26 12:46:09 +0000 | [diff] [blame] | 33 | }; |
| 34 | static constexpr size_t kManagedCoreArgumentRegistersCount = |
| 35 | arraysize(kManagedCoreArgumentRegisters); |
| 36 | static constexpr size_t kManagedFpArgumentRegistersCount = 4u; |
Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 37 | |
Vladimir Marko | 3224838 | 2016-05-19 10:37:24 +0100 | [diff] [blame] | 38 | static constexpr ManagedRegister kCalleeSaveRegisters[] = { |
| 39 | // Core registers. |
| 40 | X86ManagedRegister::FromCpuRegister(EBP), |
| 41 | X86ManagedRegister::FromCpuRegister(ESI), |
| 42 | X86ManagedRegister::FromCpuRegister(EDI), |
| 43 | // No hard float callee saves. |
| 44 | }; |
| 45 | |
Vladimir Marko | 1c3c106 | 2019-12-03 11:18:44 +0000 | [diff] [blame] | 46 | template <size_t size> |
| 47 | static constexpr uint32_t CalculateCoreCalleeSpillMask( |
| 48 | const ManagedRegister (&callee_saves)[size]) { |
Vladimir Marko | 3224838 | 2016-05-19 10:37:24 +0100 | [diff] [blame] | 49 | // The spilled PC gets a special marker. |
| 50 | uint32_t result = 1 << kNumberOfCpuRegisters; |
Vladimir Marko | 1c3c106 | 2019-12-03 11:18:44 +0000 | [diff] [blame] | 51 | for (auto&& r : callee_saves) { |
Vladimir Marko | 3224838 | 2016-05-19 10:37:24 +0100 | [diff] [blame] | 52 | if (r.AsX86().IsCpuRegister()) { |
| 53 | result |= (1 << r.AsX86().AsCpuRegister()); |
| 54 | } |
| 55 | } |
| 56 | return result; |
| 57 | } |
| 58 | |
Vladimir Marko | 1c3c106 | 2019-12-03 11:18:44 +0000 | [diff] [blame] | 59 | static constexpr uint32_t kCoreCalleeSpillMask = CalculateCoreCalleeSpillMask(kCalleeSaveRegisters); |
Vladimir Marko | 3224838 | 2016-05-19 10:37:24 +0100 | [diff] [blame] | 60 | static constexpr uint32_t kFpCalleeSpillMask = 0u; |
| 61 | |
Vladimir Marko | 1c3c106 | 2019-12-03 11:18:44 +0000 | [diff] [blame] | 62 | static constexpr ManagedRegister kNativeCalleeSaveRegisters[] = { |
| 63 | // Core registers. |
| 64 | X86ManagedRegister::FromCpuRegister(EBX), |
| 65 | X86ManagedRegister::FromCpuRegister(EBP), |
| 66 | X86ManagedRegister::FromCpuRegister(ESI), |
| 67 | X86ManagedRegister::FromCpuRegister(EDI), |
| 68 | // No hard float callee saves. |
| 69 | }; |
| 70 | |
| 71 | static constexpr uint32_t kNativeCoreCalleeSpillMask = |
| 72 | CalculateCoreCalleeSpillMask(kNativeCalleeSaveRegisters); |
| 73 | static constexpr uint32_t kNativeFpCalleeSpillMask = 0u; |
| 74 | |
Ian Rogers | 2c8f653 | 2011-09-02 17:16:34 -0700 | [diff] [blame] | 75 | // Calling convention |
| 76 | |
Vladimir Marko | d95a1f2 | 2021-03-23 16:32:52 +0000 | [diff] [blame] | 77 | ArrayRef<const ManagedRegister> X86JniCallingConvention::CalleeSaveScratchRegisters() const { |
| 78 | DCHECK(!IsCriticalNative()); |
| 79 | // All managed callee-save registers are available. |
| 80 | static_assert((kCoreCalleeSpillMask & ~kNativeCoreCalleeSpillMask) == 0u); |
| 81 | static_assert(kFpCalleeSpillMask == 0u); |
| 82 | return ArrayRef<const ManagedRegister>(kCalleeSaveRegisters); |
Ian Rogers | dc51b79 | 2011-09-22 20:41:37 -0700 | [diff] [blame] | 83 | } |
| 84 | |
Vladimir Marko | e74e0ce | 2021-12-08 14:16:21 +0000 | [diff] [blame] | 85 | ArrayRef<const ManagedRegister> X86JniCallingConvention::ArgumentScratchRegisters() const { |
| 86 | DCHECK(!IsCriticalNative()); |
| 87 | // Exclude EAX or EAX/EDX if they are used as return registers. |
| 88 | // Due to the odd ordering of argument registers, use a re-ordered array (pull EDX forward). |
| 89 | static constexpr ManagedRegister kArgumentRegisters[] = { |
| 90 | X86ManagedRegister::FromCpuRegister(EAX), |
| 91 | X86ManagedRegister::FromCpuRegister(EDX), |
| 92 | X86ManagedRegister::FromCpuRegister(ECX), |
| 93 | X86ManagedRegister::FromCpuRegister(EBX), |
| 94 | }; |
| 95 | static_assert(arraysize(kArgumentRegisters) == kManagedCoreArgumentRegistersCount); |
| 96 | static_assert(kManagedCoreArgumentRegisters[0].Equals(kArgumentRegisters[0])); |
| 97 | static_assert(kManagedCoreArgumentRegisters[1].Equals(kArgumentRegisters[2])); |
| 98 | static_assert(kManagedCoreArgumentRegisters[2].Equals(kArgumentRegisters[1])); |
| 99 | static_assert(kManagedCoreArgumentRegisters[3].Equals(kArgumentRegisters[3])); |
| 100 | ArrayRef<const ManagedRegister> scratch_regs(kArgumentRegisters); |
| 101 | X86ManagedRegister return_reg = ReturnRegister().AsX86(); |
| 102 | auto return_reg_overlaps = [return_reg](ManagedRegister reg) { |
| 103 | return return_reg.Overlaps(reg.AsX86()); |
| 104 | }; |
| 105 | if (return_reg_overlaps(scratch_regs[0])) { |
| 106 | scratch_regs = scratch_regs.SubArray(/*pos=*/ return_reg_overlaps(scratch_regs[1]) ? 2u : 1u); |
| 107 | } |
| 108 | DCHECK(std::none_of(scratch_regs.begin(), scratch_regs.end(), return_reg_overlaps)); |
| 109 | return scratch_regs; |
| 110 | } |
| 111 | |
Ian Rogers | b5d09b2 | 2012-03-06 22:14:17 -0800 | [diff] [blame] | 112 | static ManagedRegister ReturnRegisterForShorty(const char* shorty, bool jni) { |
Ian Rogers | 169c9a7 | 2011-11-13 20:13:17 -0800 | [diff] [blame] | 113 | if (shorty[0] == 'F' || shorty[0] == 'D') { |
Ian Rogers | b5d09b2 | 2012-03-06 22:14:17 -0800 | [diff] [blame] | 114 | if (jni) { |
| 115 | return X86ManagedRegister::FromX87Register(ST0); |
| 116 | } else { |
| 117 | return X86ManagedRegister::FromXmmRegister(XMM0); |
| 118 | } |
Ian Rogers | 169c9a7 | 2011-11-13 20:13:17 -0800 | [diff] [blame] | 119 | } else if (shorty[0] == 'J') { |
Ian Rogers | 2c8f653 | 2011-09-02 17:16:34 -0700 | [diff] [blame] | 120 | return X86ManagedRegister::FromRegisterPair(EAX_EDX); |
Ian Rogers | 169c9a7 | 2011-11-13 20:13:17 -0800 | [diff] [blame] | 121 | } else if (shorty[0] == 'V') { |
Ian Rogers | 45a76cb | 2011-07-21 22:00:15 -0700 | [diff] [blame] | 122 | return ManagedRegister::NoRegister(); |
Ian Rogers | b033c75 | 2011-07-20 12:22:35 -0700 | [diff] [blame] | 123 | } else { |
Ian Rogers | 2c8f653 | 2011-09-02 17:16:34 -0700 | [diff] [blame] | 124 | return X86ManagedRegister::FromCpuRegister(EAX); |
Ian Rogers | b033c75 | 2011-07-20 12:22:35 -0700 | [diff] [blame] | 125 | } |
| 126 | } |
| 127 | |
Vladimir Marko | e74e0ce | 2021-12-08 14:16:21 +0000 | [diff] [blame] | 128 | ManagedRegister X86ManagedRuntimeCallingConvention::ReturnRegister() const { |
Ian Rogers | b5d09b2 | 2012-03-06 22:14:17 -0800 | [diff] [blame] | 129 | return ReturnRegisterForShorty(GetShorty(), false); |
Ian Rogers | 2c8f653 | 2011-09-02 17:16:34 -0700 | [diff] [blame] | 130 | } |
| 131 | |
Vladimir Marko | e74e0ce | 2021-12-08 14:16:21 +0000 | [diff] [blame] | 132 | ManagedRegister X86JniCallingConvention::ReturnRegister() const { |
Ian Rogers | b5d09b2 | 2012-03-06 22:14:17 -0800 | [diff] [blame] | 133 | return ReturnRegisterForShorty(GetShorty(), true); |
Ian Rogers | 2c8f653 | 2011-09-02 17:16:34 -0700 | [diff] [blame] | 134 | } |
| 135 | |
Vladimir Marko | e74e0ce | 2021-12-08 14:16:21 +0000 | [diff] [blame] | 136 | ManagedRegister X86JniCallingConvention::IntReturnRegister() const { |
Ian Rogers | 00f7d0e | 2012-07-19 15:28:27 -0700 | [diff] [blame] | 137 | return X86ManagedRegister::FromCpuRegister(EAX); |
| 138 | } |
| 139 | |
Ian Rogers | b033c75 | 2011-07-20 12:22:35 -0700 | [diff] [blame] | 140 | // Managed runtime calling convention |
| 141 | |
Ian Rogers | 2c8f653 | 2011-09-02 17:16:34 -0700 | [diff] [blame] | 142 | ManagedRegister X86ManagedRuntimeCallingConvention::MethodRegister() { |
Ian Rogers | 67375ac | 2011-09-14 00:55:44 -0700 | [diff] [blame] | 143 | return X86ManagedRegister::FromCpuRegister(EAX); |
Ian Rogers | 2c8f653 | 2011-09-02 17:16:34 -0700 | [diff] [blame] | 144 | } |
| 145 | |
Vladimir Marko | 662f12e | 2020-02-26 12:46:09 +0000 | [diff] [blame] | 146 | void X86ManagedRuntimeCallingConvention::ResetIterator(FrameOffset displacement) { |
| 147 | ManagedRuntimeCallingConvention::ResetIterator(displacement); |
| 148 | gpr_arg_count_ = 1u; // Skip EAX for ArtMethod* |
| 149 | } |
| 150 | |
| 151 | void X86ManagedRuntimeCallingConvention::Next() { |
| 152 | if (!IsCurrentParamAFloatOrDouble()) { |
| 153 | gpr_arg_count_ += IsCurrentParamALong() ? 2u : 1u; |
| 154 | } |
| 155 | ManagedRuntimeCallingConvention::Next(); |
| 156 | } |
| 157 | |
Ian Rogers | 2c8f653 | 2011-09-02 17:16:34 -0700 | [diff] [blame] | 158 | bool X86ManagedRuntimeCallingConvention::IsCurrentParamInRegister() { |
Vladimir Marko | 662f12e | 2020-02-26 12:46:09 +0000 | [diff] [blame] | 159 | if (IsCurrentParamAFloatOrDouble()) { |
| 160 | return itr_float_and_doubles_ < kManagedFpArgumentRegistersCount; |
| 161 | } else { |
| 162 | // Don't split a long between the last register and the stack. |
| 163 | size_t extra_regs = IsCurrentParamALong() ? 1u : 0u; |
| 164 | return gpr_arg_count_ + extra_regs < kManagedCoreArgumentRegistersCount; |
| 165 | } |
Ian Rogers | b033c75 | 2011-07-20 12:22:35 -0700 | [diff] [blame] | 166 | } |
| 167 | |
Ian Rogers | 2c8f653 | 2011-09-02 17:16:34 -0700 | [diff] [blame] | 168 | bool X86ManagedRuntimeCallingConvention::IsCurrentParamOnStack() { |
Vladimir Marko | 662f12e | 2020-02-26 12:46:09 +0000 | [diff] [blame] | 169 | return !IsCurrentParamInRegister(); |
Ian Rogers | b033c75 | 2011-07-20 12:22:35 -0700 | [diff] [blame] | 170 | } |
| 171 | |
Ian Rogers | 2c8f653 | 2011-09-02 17:16:34 -0700 | [diff] [blame] | 172 | ManagedRegister X86ManagedRuntimeCallingConvention::CurrentParamRegister() { |
Vladimir Marko | 662f12e | 2020-02-26 12:46:09 +0000 | [diff] [blame] | 173 | DCHECK(IsCurrentParamInRegister()); |
| 174 | if (IsCurrentParamAFloatOrDouble()) { |
Mark P Mendell | 966c3ae | 2015-01-27 15:45:27 +0000 | [diff] [blame] | 175 | // First four float parameters are passed via XMM0..XMM3 |
Vladimir Marko | 662f12e | 2020-02-26 12:46:09 +0000 | [diff] [blame] | 176 | XmmRegister reg = static_cast<XmmRegister>(XMM0 + itr_float_and_doubles_); |
| 177 | return X86ManagedRegister::FromXmmRegister(reg); |
| 178 | } else { |
| 179 | if (IsCurrentParamALong()) { |
| 180 | switch (gpr_arg_count_) { |
| 181 | case 1: |
Vladimir Marko | e74e0ce | 2021-12-08 14:16:21 +0000 | [diff] [blame] | 182 | static_assert(kManagedCoreArgumentRegisters[1].AsX86().AsCpuRegister() == ECX); |
| 183 | static_assert(kManagedCoreArgumentRegisters[2].AsX86().AsCpuRegister() == EDX); |
Vladimir Marko | 662f12e | 2020-02-26 12:46:09 +0000 | [diff] [blame] | 184 | return X86ManagedRegister::FromRegisterPair(ECX_EDX); |
| 185 | case 2: |
Vladimir Marko | e74e0ce | 2021-12-08 14:16:21 +0000 | [diff] [blame] | 186 | static_assert(kManagedCoreArgumentRegisters[2].AsX86().AsCpuRegister() == EDX); |
| 187 | static_assert(kManagedCoreArgumentRegisters[3].AsX86().AsCpuRegister() == EBX); |
Vladimir Marko | 662f12e | 2020-02-26 12:46:09 +0000 | [diff] [blame] | 188 | return X86ManagedRegister::FromRegisterPair(EDX_EBX); |
| 189 | default: |
| 190 | LOG(FATAL) << "UNREACHABLE"; |
| 191 | UNREACHABLE(); |
| 192 | } |
| 193 | } else { |
Vladimir Marko | e74e0ce | 2021-12-08 14:16:21 +0000 | [diff] [blame] | 194 | return kManagedCoreArgumentRegisters[gpr_arg_count_]; |
Vladimir Marko | 662f12e | 2020-02-26 12:46:09 +0000 | [diff] [blame] | 195 | } |
Mark P Mendell | 966c3ae | 2015-01-27 15:45:27 +0000 | [diff] [blame] | 196 | } |
Ian Rogers | b033c75 | 2011-07-20 12:22:35 -0700 | [diff] [blame] | 197 | } |
| 198 | |
Ian Rogers | 2c8f653 | 2011-09-02 17:16:34 -0700 | [diff] [blame] | 199 | FrameOffset X86ManagedRuntimeCallingConvention::CurrentParamStackOffset() { |
Ian Rogers | cdd1d2d | 2011-08-18 09:58:17 -0700 | [diff] [blame] | 200 | return FrameOffset(displacement_.Int32Value() + // displacement |
Ian Rogers | 790a6b7 | 2014-04-01 10:36:00 -0700 | [diff] [blame] | 201 | kFramePointerSize + // Method* |
| 202 | (itr_slots_ * kFramePointerSize)); // offset into in args |
Ian Rogers | b033c75 | 2011-07-20 12:22:35 -0700 | [diff] [blame] | 203 | } |
| 204 | |
| 205 | // JNI calling convention |
| 206 | |
Igor Murashkin | 367f3dd | 2016-09-01 17:00:24 -0700 | [diff] [blame] | 207 | X86JniCallingConvention::X86JniCallingConvention(bool is_static, |
| 208 | bool is_synchronized, |
Vladimir Marko | 46a8910 | 2021-10-21 13:05:46 +0000 | [diff] [blame] | 209 | bool is_fast_native, |
Igor Murashkin | 367f3dd | 2016-09-01 17:00:24 -0700 | [diff] [blame] | 210 | bool is_critical_native, |
jeffhao | 703f2cd | 2012-07-13 17:25:52 -0700 | [diff] [blame] | 211 | const char* shorty) |
Igor Murashkin | 367f3dd | 2016-09-01 17:00:24 -0700 | [diff] [blame] | 212 | : JniCallingConvention(is_static, |
| 213 | is_synchronized, |
Vladimir Marko | 46a8910 | 2021-10-21 13:05:46 +0000 | [diff] [blame] | 214 | is_fast_native, |
Igor Murashkin | 367f3dd | 2016-09-01 17:00:24 -0700 | [diff] [blame] | 215 | is_critical_native, |
| 216 | shorty, |
| 217 | kX86PointerSize) { |
jeffhao | 703f2cd | 2012-07-13 17:25:52 -0700 | [diff] [blame] | 218 | } |
| 219 | |
| 220 | uint32_t X86JniCallingConvention::CoreSpillMask() const { |
Vladimir Marko | 1c3c106 | 2019-12-03 11:18:44 +0000 | [diff] [blame] | 221 | return is_critical_native_ ? 0u : kCoreCalleeSpillMask; |
Vladimir Marko | 3224838 | 2016-05-19 10:37:24 +0100 | [diff] [blame] | 222 | } |
| 223 | |
| 224 | uint32_t X86JniCallingConvention::FpSpillMask() const { |
Vladimir Marko | 1c3c106 | 2019-12-03 11:18:44 +0000 | [diff] [blame] | 225 | return is_critical_native_ ? 0u : kFpCalleeSpillMask; |
jeffhao | 703f2cd | 2012-07-13 17:25:52 -0700 | [diff] [blame] | 226 | } |
Ian Rogers | bdb0391 | 2011-09-14 00:55:44 -0700 | [diff] [blame] | 227 | |
Vladimir Marko | 1c3c106 | 2019-12-03 11:18:44 +0000 | [diff] [blame] | 228 | size_t X86JniCallingConvention::FrameSize() const { |
| 229 | if (is_critical_native_) { |
| 230 | CHECK(!SpillsMethod()); |
| 231 | CHECK(!HasLocalReferenceSegmentState()); |
Vladimir Marko | 1c3c106 | 2019-12-03 11:18:44 +0000 | [diff] [blame] | 232 | return 0u; // There is no managed frame for @CriticalNative. |
| 233 | } |
| 234 | |
Igor Murashkin | 367f3dd | 2016-09-01 17:00:24 -0700 | [diff] [blame] | 235 | // Method*, PC return address and callee save area size, local reference segment state |
Vladimir Marko | cedec9d | 2021-02-08 16:16:13 +0000 | [diff] [blame] | 236 | DCHECK(SpillsMethod()); |
Igor Murashkin | 367f3dd | 2016-09-01 17:00:24 -0700 | [diff] [blame] | 237 | const size_t method_ptr_size = static_cast<size_t>(kX86PointerSize); |
| 238 | const size_t pc_return_addr_size = kFramePointerSize; |
| 239 | const size_t callee_save_area_size = CalleeSaveRegisters().size() * kFramePointerSize; |
Vladimir Marko | 1c3c106 | 2019-12-03 11:18:44 +0000 | [diff] [blame] | 240 | size_t total_size = method_ptr_size + pc_return_addr_size + callee_save_area_size; |
Igor Murashkin | 367f3dd | 2016-09-01 17:00:24 -0700 | [diff] [blame] | 241 | |
Vladimir Marko | cedec9d | 2021-02-08 16:16:13 +0000 | [diff] [blame] | 242 | DCHECK(HasLocalReferenceSegmentState()); |
Vladimir Marko | 5f340ff | 2021-03-22 14:33:25 +0000 | [diff] [blame] | 243 | // Cookie is saved in one of the spilled registers. |
Igor Murashkin | 367f3dd | 2016-09-01 17:00:24 -0700 | [diff] [blame] | 244 | |
Igor Murashkin | 367f3dd | 2016-09-01 17:00:24 -0700 | [diff] [blame] | 245 | return RoundUp(total_size, kStackAlignment); |
Ian Rogers | 0d666d8 | 2011-08-14 16:03:46 -0700 | [diff] [blame] | 246 | } |
| 247 | |
Vladimir Marko | 86c8752 | 2020-05-11 16:55:55 +0100 | [diff] [blame] | 248 | size_t X86JniCallingConvention::OutFrameSize() const { |
| 249 | // The size of outgoing arguments. |
| 250 | size_t size = GetNativeOutArgsSize(/*num_args=*/ NumberOfExtraArgumentsForJni() + NumArgs(), |
| 251 | NumLongOrDoubleArgs()); |
Vladimir Marko | 1c3c106 | 2019-12-03 11:18:44 +0000 | [diff] [blame] | 252 | |
| 253 | // @CriticalNative can use tail call as all managed callee saves are preserved by AAPCS. |
| 254 | static_assert((kCoreCalleeSpillMask & ~kNativeCoreCalleeSpillMask) == 0u); |
| 255 | static_assert((kFpCalleeSpillMask & ~kNativeFpCalleeSpillMask) == 0u); |
| 256 | |
Vladimir Marko | fa458ac | 2020-02-12 14:08:07 +0000 | [diff] [blame] | 257 | if (UNLIKELY(IsCriticalNative())) { |
| 258 | // Add return address size for @CriticalNative. |
Vladimir Marko | 1c3c106 | 2019-12-03 11:18:44 +0000 | [diff] [blame] | 259 | // For normal native the return PC is part of the managed stack frame instead of out args. |
| 260 | size += kFramePointerSize; |
| 261 | // For @CriticalNative, we can make a tail call if there are no stack args |
| 262 | // and the return type is not FP type (needs moving from ST0 to MMX0) and |
| 263 | // we do not need to extend the result. |
| 264 | bool return_type_ok = GetShorty()[0] == 'I' || GetShorty()[0] == 'J' || GetShorty()[0] == 'V'; |
| 265 | DCHECK_EQ( |
| 266 | return_type_ok, |
| 267 | GetShorty()[0] != 'F' && GetShorty()[0] != 'D' && !RequiresSmallResultTypeExtension()); |
| 268 | if (return_type_ok && size == kFramePointerSize) { |
| 269 | // Note: This is not aligned to kNativeStackAlignment but that's OK for tail call. |
Vladimir Marko | 1c3c106 | 2019-12-03 11:18:44 +0000 | [diff] [blame] | 270 | static_assert(kFramePointerSize < kNativeStackAlignment); |
Vladimir Marko | 86c8752 | 2020-05-11 16:55:55 +0100 | [diff] [blame] | 271 | // The stub frame size is considered 0 in the callee where the return PC is a part of |
| 272 | // the callee frame but it is kPointerSize in the compiled stub before the tail call. |
| 273 | DCHECK_EQ(0u, GetCriticalNativeStubFrameSize(GetShorty(), NumArgs() + 1u)); |
Vladimir Marko | 1c3c106 | 2019-12-03 11:18:44 +0000 | [diff] [blame] | 274 | return kFramePointerSize; |
| 275 | } |
| 276 | } |
| 277 | |
Vladimir Marko | fa458ac | 2020-02-12 14:08:07 +0000 | [diff] [blame] | 278 | size_t out_args_size = RoundUp(size, kNativeStackAlignment); |
| 279 | if (UNLIKELY(IsCriticalNative())) { |
Vladimir Marko | 86c8752 | 2020-05-11 16:55:55 +0100 | [diff] [blame] | 280 | DCHECK_EQ(out_args_size, GetCriticalNativeStubFrameSize(GetShorty(), NumArgs() + 1u)); |
Vladimir Marko | fa458ac | 2020-02-12 14:08:07 +0000 | [diff] [blame] | 281 | } |
| 282 | return out_args_size; |
Ian Rogers | 7a99c11 | 2011-09-07 12:48:27 -0700 | [diff] [blame] | 283 | } |
| 284 | |
Vladimir Marko | 3224838 | 2016-05-19 10:37:24 +0100 | [diff] [blame] | 285 | ArrayRef<const ManagedRegister> X86JniCallingConvention::CalleeSaveRegisters() const { |
Vladimir Marko | 1c3c106 | 2019-12-03 11:18:44 +0000 | [diff] [blame] | 286 | if (UNLIKELY(IsCriticalNative())) { |
| 287 | // Do not spill anything, whether tail call or not (return PC is already on the stack). |
| 288 | return ArrayRef<const ManagedRegister>(); |
| 289 | } else { |
| 290 | return ArrayRef<const ManagedRegister>(kCalleeSaveRegisters); |
| 291 | } |
Vladimir Marko | 3224838 | 2016-05-19 10:37:24 +0100 | [diff] [blame] | 292 | } |
| 293 | |
Ian Rogers | 2c8f653 | 2011-09-02 17:16:34 -0700 | [diff] [blame] | 294 | bool X86JniCallingConvention::IsCurrentParamInRegister() { |
Ian Rogers | b5d09b2 | 2012-03-06 22:14:17 -0800 | [diff] [blame] | 295 | return false; // Everything is passed by stack. |
Ian Rogers | b033c75 | 2011-07-20 12:22:35 -0700 | [diff] [blame] | 296 | } |
| 297 | |
Ian Rogers | 2c8f653 | 2011-09-02 17:16:34 -0700 | [diff] [blame] | 298 | bool X86JniCallingConvention::IsCurrentParamOnStack() { |
Ian Rogers | b5d09b2 | 2012-03-06 22:14:17 -0800 | [diff] [blame] | 299 | return true; // Everything is passed by stack. |
Ian Rogers | b033c75 | 2011-07-20 12:22:35 -0700 | [diff] [blame] | 300 | } |
| 301 | |
Ian Rogers | 2c8f653 | 2011-09-02 17:16:34 -0700 | [diff] [blame] | 302 | ManagedRegister X86JniCallingConvention::CurrentParamRegister() { |
Ian Rogers | b033c75 | 2011-07-20 12:22:35 -0700 | [diff] [blame] | 303 | LOG(FATAL) << "Should not reach here"; |
Elliott Hughes | c1896c9 | 2018-11-29 11:33:18 -0800 | [diff] [blame] | 304 | UNREACHABLE(); |
Ian Rogers | b033c75 | 2011-07-20 12:22:35 -0700 | [diff] [blame] | 305 | } |
| 306 | |
Ian Rogers | 2c8f653 | 2011-09-02 17:16:34 -0700 | [diff] [blame] | 307 | FrameOffset X86JniCallingConvention::CurrentParamStackOffset() { |
Vladimir Marko | 86c8752 | 2020-05-11 16:55:55 +0100 | [diff] [blame] | 308 | return |
| 309 | FrameOffset(displacement_.Int32Value() - OutFrameSize() + (itr_slots_ * kFramePointerSize)); |
Ian Rogers | b033c75 | 2011-07-20 12:22:35 -0700 | [diff] [blame] | 310 | } |
| 311 | |
Vladimir Marko | 4d52715 | 2021-11-23 12:07:04 +0000 | [diff] [blame] | 312 | ManagedRegister X86JniCallingConvention::LockingArgumentRegister() const { |
| 313 | DCHECK(!IsFastNative()); |
| 314 | DCHECK(!IsCriticalNative()); |
| 315 | DCHECK(IsSynchronized()); |
| 316 | // The callee-save register is EBP is suitable as a locking argument. |
| 317 | static_assert(kCalleeSaveRegisters[0].Equals(X86ManagedRegister::FromCpuRegister(EBP))); |
| 318 | return X86ManagedRegister::FromCpuRegister(EBP); |
| 319 | } |
| 320 | |
Vladimir Marko | 1c3c106 | 2019-12-03 11:18:44 +0000 | [diff] [blame] | 321 | ManagedRegister X86JniCallingConvention::HiddenArgumentRegister() const { |
| 322 | CHECK(IsCriticalNative()); |
| 323 | // EAX is neither managed callee-save, nor argument register, nor scratch register. |
| 324 | DCHECK(std::none_of(kCalleeSaveRegisters, |
| 325 | kCalleeSaveRegisters + std::size(kCalleeSaveRegisters), |
| 326 | [](ManagedRegister callee_save) constexpr { |
| 327 | return callee_save.Equals(X86ManagedRegister::FromCpuRegister(EAX)); |
| 328 | })); |
Vladimir Marko | 1c3c106 | 2019-12-03 11:18:44 +0000 | [diff] [blame] | 329 | return X86ManagedRegister::FromCpuRegister(EAX); |
| 330 | } |
| 331 | |
| 332 | bool X86JniCallingConvention::UseTailCall() const { |
| 333 | CHECK(IsCriticalNative()); |
Vladimir Marko | 86c8752 | 2020-05-11 16:55:55 +0100 | [diff] [blame] | 334 | return OutFrameSize() == kFramePointerSize; |
Ian Rogers | b033c75 | 2011-07-20 12:22:35 -0700 | [diff] [blame] | 335 | } |
| 336 | |
Ian Rogers | 2c8f653 | 2011-09-02 17:16:34 -0700 | [diff] [blame] | 337 | } // namespace x86 |
Ian Rogers | b033c75 | 2011-07-20 12:22:35 -0700 | [diff] [blame] | 338 | } // namespace art |