/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_CODE_GENERATOR_ARM_VIXL_H_
#define ART_COMPILER_OPTIMIZING_CODE_GENERATOR_ARM_VIXL_H_

#include "base/enums.h"
#include "class_root.h"
#include "code_generator.h"
#include "common_arm.h"
#include "dex/string_reference.h"
#include "dex/type_reference.h"
#include "driver/compiler_options.h"
#include "nodes.h"
#include "parallel_move_resolver.h"
#include "utils/arm/assembler_arm_vixl.h"

// TODO(VIXL): make vixl clean wrt -Wshadow.
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wshadow"
#include "aarch32/constants-aarch32.h"
#include "aarch32/instructions-aarch32.h"
#include "aarch32/macro-assembler-aarch32.h"
#pragma GCC diagnostic pop

namespace art {

namespace linker {
class Thumb2RelativePatcherTest;
}  // namespace linker

namespace arm {

// This constant is used as an approximate margin when emission of veneer and literal pools
// must be blocked.
static constexpr int kMaxMacroInstructionSizeInBytes =
    15 * vixl::aarch32::kMaxInstructionSizeInBytes;
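// Note: this margin is typically reserved through VIXL emission scopes (e.g. EmissionCheckScope
// or ExactAssemblyScope) around macro-instructions whose encoding must not be interleaved with
// veneer/literal pool emission; see the uses in the corresponding .cc file.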

static const vixl::aarch32::Register kParameterCoreRegistersVIXL[] = {
    vixl::aarch32::r1,
    vixl::aarch32::r2,
    vixl::aarch32::r3
};
static const size_t kParameterCoreRegistersLengthVIXL = arraysize(kParameterCoreRegistersVIXL);
static const vixl::aarch32::SRegister kParameterFpuRegistersVIXL[] = {
    vixl::aarch32::s0,
    vixl::aarch32::s1,
    vixl::aarch32::s2,
    vixl::aarch32::s3,
    vixl::aarch32::s4,
    vixl::aarch32::s5,
    vixl::aarch32::s6,
    vixl::aarch32::s7,
    vixl::aarch32::s8,
    vixl::aarch32::s9,
    vixl::aarch32::s10,
    vixl::aarch32::s11,
    vixl::aarch32::s12,
    vixl::aarch32::s13,
    vixl::aarch32::s14,
    vixl::aarch32::s15
};
static const size_t kParameterFpuRegistersLengthVIXL = arraysize(kParameterFpuRegistersVIXL);

static const vixl::aarch32::Register kMethodRegister = vixl::aarch32::r0;

// The callee-save core registers are r5, r6, r7, r8 (except when emitting Baker
// read barriers, where r8 is used as the Marking Register), r10, r11, and lr.
static const vixl::aarch32::RegisterList kCoreCalleeSaves = vixl::aarch32::RegisterList::Union(
    vixl::aarch32::RegisterList(vixl::aarch32::r5,
                                vixl::aarch32::r6,
                                vixl::aarch32::r7),
    // Do not consider r8 as a callee-save register with Baker read barriers.
    (kReserveMarkingRegister
         ? vixl::aarch32::RegisterList()
         : vixl::aarch32::RegisterList(vixl::aarch32::r8)),
    vixl::aarch32::RegisterList(vixl::aarch32::r10,
                                vixl::aarch32::r11,
                                vixl::aarch32::lr));

// The callee-save FP registers are s16 to s31 inclusive.
static const vixl::aarch32::SRegisterList kFpuCalleeSaves =
    vixl::aarch32::SRegisterList(vixl::aarch32::s16, 16);
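// Note: SRegisterList(s16, 16) denotes 16 consecutive S registers starting at s16,
// i.e. s16-s31, matching the comment above.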

static const vixl::aarch32::Register kRuntimeParameterCoreRegistersVIXL[] = {
    vixl::aarch32::r0,
    vixl::aarch32::r1,
    vixl::aarch32::r2,
    vixl::aarch32::r3
};
static const size_t kRuntimeParameterCoreRegistersLengthVIXL =
    arraysize(kRuntimeParameterCoreRegistersVIXL);
static const vixl::aarch32::SRegister kRuntimeParameterFpuRegistersVIXL[] = {
    vixl::aarch32::s0,
    vixl::aarch32::s1,
    vixl::aarch32::s2,
    vixl::aarch32::s3
};
static const size_t kRuntimeParameterFpuRegistersLengthVIXL =
    arraysize(kRuntimeParameterFpuRegistersVIXL);

class LoadClassSlowPathARMVIXL;
class CodeGeneratorARMVIXL;

using VIXLInt32Literal = vixl::aarch32::Literal<int32_t>;
using VIXLUInt32Literal = vixl::aarch32::Literal<uint32_t>;

class JumpTableARMVIXL : public DeletableArenaObject<kArenaAllocSwitchTable> {
 public:
  explicit JumpTableARMVIXL(HPackedSwitch* switch_instr)
      : switch_instr_(switch_instr),
        table_start_(),
        bb_addresses_(switch_instr->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
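    // One manually placed literal is created per switch entry; the literal values are
    // fixed up later, once the basic block labels are bound (see FixTable()).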
    uint32_t num_entries = switch_instr_->GetNumEntries();
    for (uint32_t i = 0; i < num_entries; i++) {
      VIXLInt32Literal* lit = new VIXLInt32Literal(0, vixl32::RawLiteral::kManuallyPlaced);
      bb_addresses_.emplace_back(lit);
    }
  }

  vixl::aarch32::Label* GetTableStartLabel() { return &table_start_; }

  void EmitTable(CodeGeneratorARMVIXL* codegen);
  void FixTable(CodeGeneratorARMVIXL* codegen);

 private:
  HPackedSwitch* const switch_instr_;
  vixl::aarch32::Label table_start_;
  ArenaVector<std::unique_ptr<VIXLInt32Literal>> bb_addresses_;

  DISALLOW_COPY_AND_ASSIGN(JumpTableARMVIXL);
};

class InvokeRuntimeCallingConventionARMVIXL
    : public CallingConvention<vixl::aarch32::Register, vixl::aarch32::SRegister> {
 public:
  InvokeRuntimeCallingConventionARMVIXL()
      : CallingConvention(kRuntimeParameterCoreRegistersVIXL,
                          kRuntimeParameterCoreRegistersLengthVIXL,
                          kRuntimeParameterFpuRegistersVIXL,
                          kRuntimeParameterFpuRegistersLengthVIXL,
                          kArmPointerSize) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConventionARMVIXL);
};

class InvokeDexCallingConventionARMVIXL
    : public CallingConvention<vixl::aarch32::Register, vixl::aarch32::SRegister> {
 public:
  InvokeDexCallingConventionARMVIXL()
      : CallingConvention(kParameterCoreRegistersVIXL,
                          kParameterCoreRegistersLengthVIXL,
                          kParameterFpuRegistersVIXL,
                          kParameterFpuRegistersLengthVIXL,
                          kArmPointerSize) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeDexCallingConventionARMVIXL);
};

class InvokeDexCallingConventionVisitorARMVIXL : public InvokeDexCallingConventionVisitor {
 public:
  InvokeDexCallingConventionVisitorARMVIXL() {}
  virtual ~InvokeDexCallingConventionVisitorARMVIXL() {}

  Location GetNextLocation(DataType::Type type) override;
  Location GetReturnLocation(DataType::Type type) const override;
  Location GetMethodLocation() const override;

 private:
  InvokeDexCallingConventionARMVIXL calling_convention;
  uint32_t double_index_ = 0;

  DISALLOW_COPY_AND_ASSIGN(InvokeDexCallingConventionVisitorARMVIXL);
};

class CriticalNativeCallingConventionVisitorARMVIXL : public InvokeDexCallingConventionVisitor {
 public:
  explicit CriticalNativeCallingConventionVisitorARMVIXL(bool for_register_allocation)
      : for_register_allocation_(for_register_allocation) {}

  virtual ~CriticalNativeCallingConventionVisitorARMVIXL() {}

  Location GetNextLocation(DataType::Type type) override;
  Location GetReturnLocation(DataType::Type type) const override;
  Location GetMethodLocation() const override;

  size_t GetStackOffset() const { return stack_offset_; }

 private:
  // Register allocator does not support adjusting frame size, so we cannot provide final locations
  // of stack arguments for register allocation. We ask the register allocator for any location and
  // move these arguments to the right place after adjusting the SP when generating the call.
  const bool for_register_allocation_;
  size_t gpr_index_ = 0u;
  size_t stack_offset_ = 0u;

  DISALLOW_COPY_AND_ASSIGN(CriticalNativeCallingConventionVisitorARMVIXL);
};

class FieldAccessCallingConventionARMVIXL : public FieldAccessCallingConvention {
 public:
  FieldAccessCallingConventionARMVIXL() {}

  Location GetObjectLocation() const override {
    return helpers::LocationFrom(vixl::aarch32::r1);
  }
  Location GetFieldIndexLocation() const override {
    return helpers::LocationFrom(vixl::aarch32::r0);
  }
  Location GetReturnLocation(DataType::Type type) const override {
    return DataType::Is64BitType(type)
        ? helpers::LocationFrom(vixl::aarch32::r0, vixl::aarch32::r1)
        : helpers::LocationFrom(vixl::aarch32::r0);
  }
  Location GetSetValueLocation(DataType::Type type, bool is_instance) const override {
    return DataType::Is64BitType(type)
        ? helpers::LocationFrom(vixl::aarch32::r2, vixl::aarch32::r3)
        : (is_instance
              ? helpers::LocationFrom(vixl::aarch32::r2)
              : helpers::LocationFrom(vixl::aarch32::r1));
  }
  Location GetFpuLocation(DataType::Type type) const override {
    return DataType::Is64BitType(type)
        ? helpers::LocationFrom(vixl::aarch32::s0, vixl::aarch32::s1)
        : helpers::LocationFrom(vixl::aarch32::s0);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(FieldAccessCallingConventionARMVIXL);
};

class SlowPathCodeARMVIXL : public SlowPathCode {
 public:
  explicit SlowPathCodeARMVIXL(HInstruction* instruction)
      : SlowPathCode(instruction), entry_label_(), exit_label_() {}

  vixl::aarch32::Label* GetEntryLabel() { return &entry_label_; }
  vixl::aarch32::Label* GetExitLabel() { return &exit_label_; }

  void SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) override;
  void RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) override;

 private:
  vixl::aarch32::Label entry_label_;
  vixl::aarch32::Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARMVIXL);
};

class ParallelMoveResolverARMVIXL : public ParallelMoveResolverWithSwap {
 public:
  ParallelMoveResolverARMVIXL(ArenaAllocator* allocator, CodeGeneratorARMVIXL* codegen)
      : ParallelMoveResolverWithSwap(allocator), codegen_(codegen) {}

  void EmitMove(size_t index) override;
  void EmitSwap(size_t index) override;
  void SpillScratch(int reg) override;
  void RestoreScratch(int reg) override;

  ArmVIXLAssembler* GetAssembler() const;

 private:
  void Exchange(vixl32::Register reg, int mem);
  void Exchange(int mem1, int mem2);

  CodeGeneratorARMVIXL* const codegen_;

  DISALLOW_COPY_AND_ASSIGN(ParallelMoveResolverARMVIXL);
};

class LocationsBuilderARMVIXL : public HGraphVisitor {
 public:
  LocationsBuilderARMVIXL(HGraph* graph, CodeGeneratorARMVIXL* codegen)
      : HGraphVisitor(graph), codegen_(codegen) {}

#define DECLARE_VISIT_INSTRUCTION(name, super) \
  void Visit##name(H##name* instr) override;

  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(DECLARE_VISIT_INSTRUCTION)
  FOR_EACH_CONCRETE_INSTRUCTION_ARM(DECLARE_VISIT_INSTRUCTION)
  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

  void VisitInstruction(HInstruction* instruction) override {
    LOG(FATAL) << "Unreachable instruction " << instruction->DebugName()
               << " (id " << instruction->GetId() << ")";
  }

 private:
  void HandleInvoke(HInvoke* invoke);
  void HandleBitwiseOperation(HBinaryOperation* operation, Opcode opcode);
  void HandleCondition(HCondition* condition);
  void HandleIntegerRotate(LocationSummary* locations);
  void HandleLongRotate(LocationSummary* locations);
  void HandleShift(HBinaryOperation* operation);
  void HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info);
  void HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info);

  Location ArithmeticZeroOrFpuRegister(HInstruction* input);
  Location ArmEncodableConstantOrRegister(HInstruction* constant, Opcode opcode);
  bool CanEncodeConstantAsImmediate(HConstant* input_cst, Opcode opcode);

  CodeGeneratorARMVIXL* const codegen_;
  InvokeDexCallingConventionVisitorARMVIXL parameter_visitor_;

  DISALLOW_COPY_AND_ASSIGN(LocationsBuilderARMVIXL);
};

class InstructionCodeGeneratorARMVIXL : public InstructionCodeGenerator {
 public:
  InstructionCodeGeneratorARMVIXL(HGraph* graph, CodeGeneratorARMVIXL* codegen);

#define DECLARE_VISIT_INSTRUCTION(name, super) \
  void Visit##name(H##name* instr) override;

  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(DECLARE_VISIT_INSTRUCTION)
  FOR_EACH_CONCRETE_INSTRUCTION_ARM(DECLARE_VISIT_INSTRUCTION)
  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

  void VisitInstruction(HInstruction* instruction) override {
    LOG(FATAL) << "Unreachable instruction " << instruction->DebugName()
               << " (id " << instruction->GetId() << ")";
  }

  ArmVIXLAssembler* GetAssembler() const { return assembler_; }
  ArmVIXLMacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); }

  void GenerateAndConst(vixl::aarch32::Register out, vixl::aarch32::Register first, uint32_t value);

 private:
  // Generate code for the given suspend check. If not null, `successor`
  // is the block to branch to if the suspend check is not needed, and after
  // the suspend call.
  void GenerateSuspendCheck(HSuspendCheck* instruction, HBasicBlock* successor);
  void GenerateClassInitializationCheck(LoadClassSlowPathARMVIXL* slow_path,
                                        vixl32::Register class_reg);
  void GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check,
                                         vixl::aarch32::Register temp,
                                         vixl::aarch32::FlagsUpdate flags_update);
  void GenerateOrrConst(vixl::aarch32::Register out, vixl::aarch32::Register first, uint32_t value);
  void GenerateEorConst(vixl::aarch32::Register out, vixl::aarch32::Register first, uint32_t value);
  void GenerateAddLongConst(Location out, Location first, uint64_t value);
  void HandleBitwiseOperation(HBinaryOperation* operation);
  void HandleCondition(HCondition* condition);
  void HandleIntegerRotate(HRor* ror);
  void HandleLongRotate(HRor* ror);
  void HandleShift(HBinaryOperation* operation);

  void GenerateWideAtomicStore(vixl::aarch32::Register addr,
                               uint32_t offset,
                               vixl::aarch32::Register value_lo,
                               vixl::aarch32::Register value_hi,
                               vixl::aarch32::Register temp1,
                               vixl::aarch32::Register temp2,
                               HInstruction* instruction);
  void GenerateWideAtomicLoad(vixl::aarch32::Register addr,
                              uint32_t offset,
                              vixl::aarch32::Register out_lo,
                              vixl::aarch32::Register out_hi);

  void HandleFieldSet(HInstruction* instruction,
                      const FieldInfo& field_info,
                      bool value_can_be_null);
  void HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info);

  void GenerateMinMaxInt(LocationSummary* locations, bool is_min);
  void GenerateMinMaxLong(LocationSummary* locations, bool is_min);
  void GenerateMinMaxFloat(HInstruction* minmax, bool is_min);
  void GenerateMinMaxDouble(HInstruction* minmax, bool is_min);
  void GenerateMinMax(HBinaryOperation* minmax, bool is_min);

  // Generate a heap reference load using one register `out`:
  //
  //   out <- *(out + offset)
  //
  // while honoring heap poisoning and/or read barriers (if any).
  //
  // Location `maybe_temp` is used when generating a read barrier and
  // shall be a register in that case; it may be an invalid location
  // otherwise.
  void GenerateReferenceLoadOneRegister(HInstruction* instruction,
                                        Location out,
                                        uint32_t offset,
                                        Location maybe_temp,
                                        ReadBarrierOption read_barrier_option);
  // Generate a heap reference load using two different registers
  // `out` and `obj`:
  //
  //   out <- *(obj + offset)
  //
  // while honoring heap poisoning and/or read barriers (if any).
  //
  // Location `maybe_temp` is used when generating a Baker's (fast
  // path) read barrier and shall be a register in that case; it may
  // be an invalid location otherwise.
  void GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
                                         Location out,
                                         Location obj,
                                         uint32_t offset,
                                         Location maybe_temp,
                                         ReadBarrierOption read_barrier_option);
  void GenerateTestAndBranch(HInstruction* instruction,
                             size_t condition_input_index,
                             vixl::aarch32::Label* true_target,
                             vixl::aarch32::Label* false_target,
                             bool far_target = true);
  void GenerateCompareTestAndBranch(HCondition* condition,
                                    vixl::aarch32::Label* true_target,
                                    vixl::aarch32::Label* false_target,
                                    bool is_far_target = true);
  void DivRemOneOrMinusOne(HBinaryOperation* instruction);
  void DivRemByPowerOfTwo(HBinaryOperation* instruction);
  void GenerateDivRemWithAnyConstant(HBinaryOperation* instruction);
  void GenerateDivRemConstantIntegral(HBinaryOperation* instruction);
  void HandleGoto(HInstruction* got, HBasicBlock* successor);
  void GenerateMethodEntryExitHook(HInstruction* instruction);

  vixl::aarch32::MemOperand VecAddress(
      HVecMemoryOperation* instruction,
      // This function may acquire a scratch register.
      vixl::aarch32::UseScratchRegisterScope* temps_scope,
      /*out*/ vixl32::Register* scratch);
  vixl::aarch32::AlignedMemOperand VecAddressUnaligned(
      HVecMemoryOperation* instruction,
      // This function may acquire a scratch register.
      vixl::aarch32::UseScratchRegisterScope* temps_scope,
      /*out*/ vixl32::Register* scratch);

  ArmVIXLAssembler* const assembler_;
  CodeGeneratorARMVIXL* const codegen_;

  DISALLOW_COPY_AND_ASSIGN(InstructionCodeGeneratorARMVIXL);
};

class CodeGeneratorARMVIXL : public CodeGenerator {
 public:
  CodeGeneratorARMVIXL(HGraph* graph,
                       const CompilerOptions& compiler_options,
                       OptimizingCompilerStats* stats = nullptr);
  virtual ~CodeGeneratorARMVIXL() {}

  void GenerateFrameEntry() override;
  void GenerateFrameExit() override;
  void Bind(HBasicBlock* block) override;
  void MoveConstant(Location destination, int32_t value) override;
  void MoveLocation(Location dst, Location src, DataType::Type dst_type) override;
  void AddLocationAsTemp(Location location, LocationSummary* locations) override;

  size_t SaveCoreRegister(size_t stack_index, uint32_t reg_id) override;
  size_t RestoreCoreRegister(size_t stack_index, uint32_t reg_id) override;
  size_t SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) override;
  size_t RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) override;

  size_t GetWordSize() const override {
    return static_cast<size_t>(kArmPointerSize);
  }

  size_t GetCalleePreservedFPWidth() const override {
    return vixl::aarch32::kSRegSizeInBytes;
  }

  size_t GetSIMDRegisterWidth() const override {
    // The ARM 32-bit backend doesn't support Q registers in the vectorizer, only D
    // registers (due to register allocator restrictions: overlapping s/d/q
    // registers).
    return vixl::aarch32::kDRegSizeInBytes;
  }

  HGraphVisitor* GetLocationBuilder() override { return &location_builder_; }

  HGraphVisitor* GetInstructionVisitor() override { return &instruction_visitor_; }

  ArmVIXLAssembler* GetAssembler() override { return &assembler_; }

  const ArmVIXLAssembler& GetAssembler() const override { return assembler_; }

  ArmVIXLMacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); }

  uintptr_t GetAddressOf(HBasicBlock* block) override {
    vixl::aarch32::Label* block_entry_label = GetLabelOf(block);
    DCHECK(block_entry_label->IsBound());
    return block_entry_label->GetLocation();
  }

  void FixJumpTables();
  void SetupBlockedRegisters() const override;

  void DumpCoreRegister(std::ostream& stream, int reg) const override;
  void DumpFloatingPointRegister(std::ostream& stream, int reg) const override;

  ParallelMoveResolver* GetMoveResolver() override { return &move_resolver_; }
  InstructionSet GetInstructionSet() const override { return InstructionSet::kThumb2; }

  const ArmInstructionSetFeatures& GetInstructionSetFeatures() const;

  // Helper method to move a 32-bit value between two locations.
  void Move32(Location destination, Location source);

  void LoadFromShiftedRegOffset(DataType::Type type,
                                Location out_loc,
                                vixl::aarch32::Register base,
                                vixl::aarch32::Register reg_index,
                                vixl::aarch32::Condition cond = vixl::aarch32::al);
  void StoreToShiftedRegOffset(DataType::Type type,
                               Location out_loc,
                               vixl::aarch32::Register base,
                               vixl::aarch32::Register reg_index,
                               vixl::aarch32::Condition cond = vixl::aarch32::al);

  // Generate code to invoke a runtime entry point.
  void InvokeRuntime(QuickEntrypointEnum entrypoint,
                     HInstruction* instruction,
                     uint32_t dex_pc,
                     SlowPathCode* slow_path = nullptr) override;

  // Generate code to invoke a runtime entry point, but do not record
  // PC-related information in a stack map.
  void InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                           HInstruction* instruction,
                                           SlowPathCode* slow_path);

  // Emit a write barrier.
  void MarkGCCard(vixl::aarch32::Register temp,
                  vixl::aarch32::Register card,
                  vixl::aarch32::Register object,
                  vixl::aarch32::Register value,
                  bool value_can_be_null);

  void GenerateMemoryBarrier(MemBarrierKind kind);

  vixl::aarch32::Label* GetLabelOf(HBasicBlock* block) {
    block = FirstNonEmptyBlock(block);
    return &(block_labels_[block->GetBlockId()]);
  }

  vixl32::Label* GetFinalLabel(HInstruction* instruction, vixl32::Label* final_label);

  void Initialize() override {
    block_labels_.resize(GetGraph()->GetBlocks().size());
  }

  void Finalize(CodeAllocator* allocator) override;

  bool NeedsTwoRegisters(DataType::Type type) const override {
    return type == DataType::Type::kFloat64 || type == DataType::Type::kInt64;
  }

  void ComputeSpillMask() override;

  vixl::aarch32::Label* GetFrameEntryLabel() { return &frame_entry_label_; }

  // Check if the desired_string_load_kind is supported. If it is, return it,
  // otherwise return a fall-back kind that should be used instead.
  HLoadString::LoadKind GetSupportedLoadStringKind(
      HLoadString::LoadKind desired_string_load_kind) override;

  // Check if the desired_class_load_kind is supported. If it is, return it,
  // otherwise return a fall-back kind that should be used instead.
  HLoadClass::LoadKind GetSupportedLoadClassKind(
      HLoadClass::LoadKind desired_class_load_kind) override;

  // Check if the desired_dispatch_info is supported. If it is, return it,
  // otherwise return a fall-back info that should be used instead.
  HInvokeStaticOrDirect::DispatchInfo GetSupportedInvokeStaticOrDirectDispatch(
      const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
      ArtMethod* method) override;

  void LoadMethod(MethodLoadKind load_kind, Location temp, HInvoke* invoke);
  void GenerateStaticOrDirectCall(
      HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path = nullptr) override;
  void GenerateVirtualCall(
      HInvokeVirtual* invoke, Location temp, SlowPathCode* slow_path = nullptr) override;

  void MoveFromReturnRegister(Location trg, DataType::Type type) override;

  // The PcRelativePatchInfo is used for PC-relative addressing of methods/strings/types,
  // whether through .data.bimg.rel.ro, .bss, or directly in the boot image.
  //
  // The PC-relative address is loaded with three instructions,
  // MOVW+MOVT to load the offset to base_reg and then ADD base_reg, PC. The offset
  // is calculated from the ADD's effective PC, i.e. PC+4 on Thumb2. Though we
  // currently emit these 3 instructions together, instruction scheduling could
  // split this sequence apart, so we keep separate labels for each of them.
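  //
  // Illustrative sketch of the emitted sequence (the immediates are placeholders
  // patched at link time; see EmitMovwMovtPlaceholder()):
  //
  //   movw_label:   movw base_reg, #<placeholder_lo>
  //   movt_label:   movt base_reg, #<placeholder_hi>
  //   add_pc_label: add  base_reg, base_reg, pc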
  struct PcRelativePatchInfo {
    PcRelativePatchInfo(const DexFile* dex_file, uint32_t off_or_idx)
        : target_dex_file(dex_file), offset_or_index(off_or_idx) { }

    // Target dex file or null for .data.bimg.rel.ro patches.
    const DexFile* target_dex_file;
    // Either the boot image offset (to write to .data.bimg.rel.ro) or string/type/method index.
    uint32_t offset_or_index;
    vixl::aarch32::Label movw_label;
    vixl::aarch32::Label movt_label;
    vixl::aarch32::Label add_pc_label;
  };

  PcRelativePatchInfo* NewBootImageIntrinsicPatch(uint32_t intrinsic_data);
  PcRelativePatchInfo* NewBootImageRelRoPatch(uint32_t boot_image_offset);
  PcRelativePatchInfo* NewBootImageMethodPatch(MethodReference target_method);
  PcRelativePatchInfo* NewMethodBssEntryPatch(MethodReference target_method);
  PcRelativePatchInfo* NewBootImageTypePatch(const DexFile& dex_file, dex::TypeIndex type_index);
  PcRelativePatchInfo* NewTypeBssEntryPatch(HLoadClass* load_class);
  PcRelativePatchInfo* NewBootImageStringPatch(const DexFile& dex_file,
                                               dex::StringIndex string_index);
  PcRelativePatchInfo* NewStringBssEntryPatch(const DexFile& dex_file,
                                              dex::StringIndex string_index);

  // Emit the BL instruction for entrypoint thunk call and record the associated patch for AOT.
  void EmitEntrypointThunkCall(ThreadOffset32 entrypoint_offset);

  // Emit the BNE instruction for Baker read barrier and record
  // the associated patch for AOT or slow path for JIT.
  void EmitBakerReadBarrierBne(uint32_t custom_data);

  VIXLUInt32Literal* DeduplicateBootImageAddressLiteral(uint32_t address);
  VIXLUInt32Literal* DeduplicateJitStringLiteral(const DexFile& dex_file,
                                                 dex::StringIndex string_index,
                                                 Handle<mirror::String> handle);
  VIXLUInt32Literal* DeduplicateJitClassLiteral(const DexFile& dex_file,
                                                dex::TypeIndex type_index,
                                                Handle<mirror::Class> handle);

  void LoadBootImageRelRoEntry(vixl::aarch32::Register reg, uint32_t boot_image_offset);
  void LoadBootImageAddress(vixl::aarch32::Register reg, uint32_t boot_image_reference);
  void LoadTypeForBootImageIntrinsic(vixl::aarch32::Register reg, TypeReference type_reference);
  void LoadIntrinsicDeclaringClass(vixl::aarch32::Register reg, HInvoke* invoke);
  void LoadClassRootForIntrinsic(vixl::aarch32::Register reg, ClassRoot class_root);

  void EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) override;
  bool NeedsThunkCode(const linker::LinkerPatch& patch) const override;
  void EmitThunkCode(const linker::LinkerPatch& patch,
                     /*out*/ ArenaVector<uint8_t>* code,
                     /*out*/ std::string* debug_name) override;

  void EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) override;

  // Generate a GC root reference load:
  //
  //   root <- *(obj + offset)
  //
  // while honoring read barriers based on read_barrier_option.
  void GenerateGcRootFieldLoad(HInstruction* instruction,
                               Location root,
                               vixl::aarch32::Register obj,
                               uint32_t offset,
                               ReadBarrierOption read_barrier_option);
  // Generate MOV for an intrinsic CAS to mark the old value with Baker read barrier.
  void GenerateIntrinsicCasMoveWithBakerReadBarrier(vixl::aarch32::Register marked_old_value,
                                                    vixl::aarch32::Register old_value);
  // Fast path implementation of ReadBarrier::Barrier for a heap
  // reference field load when Baker's read barriers are used.
  // Overload suitable for Unsafe.getObject/-Volatile() intrinsic.
  void GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                             Location ref,
                                             vixl::aarch32::Register obj,
                                             const vixl::aarch32::MemOperand& src,
                                             bool needs_null_check);
  // Fast path implementation of ReadBarrier::Barrier for a heap
  // reference field load when Baker's read barriers are used.
  void GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                             Location ref,
                                             vixl::aarch32::Register obj,
                                             uint32_t offset,
                                             Location maybe_temp,
                                             bool needs_null_check);
  // Fast path implementation of ReadBarrier::Barrier for a heap
  // reference array load when Baker's read barriers are used.
  void GenerateArrayLoadWithBakerReadBarrier(Location ref,
                                             vixl::aarch32::Register obj,
                                             uint32_t data_offset,
                                             Location index,
                                             Location temp,
                                             bool needs_null_check);

  // Emit code checking the status of the Marking Register, and
  // aborting the program if MR does not match the value stored in the
  // art::Thread object. Code is only emitted in debug mode and if
  // CompilerOptions::EmitRunTimeChecksInDebugMode returns true.
  //
  // Argument `code` is used to identify the different occurrences of
  // MaybeGenerateMarkingRegisterCheck in the code generator, and is
  // used together with kMarkingRegisterCheckBreakCodeBaseCode to
  // create the value passed to the BKPT instruction. Note that unlike
  // in the ARM64 code generator, where `__LINE__` is passed as `code`
  // argument to
  // CodeGeneratorARM64::MaybeGenerateMarkingRegisterCheck, we cannot
  // realistically do that here, as Encoding T1 for the BKPT
  // instruction only accepts 8-bit immediate values.
  //
  // If `temp_loc` is a valid location, it is expected to be a
  // register and will be used as a temporary to generate code;
  // otherwise, a temporary will be fetched from the core register
  // scratch pool.
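  //
  // Illustrative sketch only: MaybeGenerateMarkingRegisterCheck(/* code= */ 14) is expected,
  // in debug builds with these checks enabled, to emit a comparison of MR against the value
  // stored in the art::Thread object, followed by
  // BKPT #(kMarkingRegisterCheckBreakCodeBaseCode + 14) on a mismatch.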
  virtual void MaybeGenerateMarkingRegisterCheck(int code,
                                                 Location temp_loc = Location::NoLocation());

  // Create slow path for a read barrier for a heap reference within `instruction`.
  //
  // This is a helper function for GenerateReadBarrierSlow() that has the same
  // arguments. The creation and adding of the slow path is exposed for intrinsics
  // that cannot use GenerateReadBarrierSlow() from their own slow paths.
  SlowPathCodeARMVIXL* AddReadBarrierSlowPath(HInstruction* instruction,
                                              Location out,
                                              Location ref,
                                              Location obj,
                                              uint32_t offset,
                                              Location index);

  // Generate a read barrier for a heap reference within `instruction`
  // using a slow path.
  //
  // A read barrier for an object reference read from the heap is
  // implemented as a call to the artReadBarrierSlow runtime entry
  // point, which is passed the values in locations `ref`, `obj`, and
  // `offset`:
  //
  //   mirror::Object* artReadBarrierSlow(mirror::Object* ref,
  //                                      mirror::Object* obj,
  //                                      uint32_t offset);
  //
  // The `out` location contains the value returned by
  // artReadBarrierSlow.
  //
  // When `index` is provided (i.e. for array accesses), the offset
  // value passed to artReadBarrierSlow is adjusted to take `index`
  // into account.
  void GenerateReadBarrierSlow(HInstruction* instruction,
                               Location out,
                               Location ref,
                               Location obj,
                               uint32_t offset,
                               Location index = Location::NoLocation());

  // If read barriers are enabled, generate a read barrier for a heap
  // reference using a slow path. If heap poisoning is enabled, also
  // unpoison the reference in `out`.
  void MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                    Location out,
                                    Location ref,
                                    Location obj,
                                    uint32_t offset,
                                    Location index = Location::NoLocation());

  // Generate a read barrier for a GC root within `instruction` using
  // a slow path.
  //
  // A read barrier for an object reference GC root is implemented as
  // a call to the artReadBarrierForRootSlow runtime entry point,
  // which is passed the value in location `root`:
  //
  //   mirror::Object* artReadBarrierForRootSlow(GcRoot<mirror::Object>* root);
  //
  // The `out` location contains the value returned by
  // artReadBarrierForRootSlow.
  void GenerateReadBarrierForRootSlow(HInstruction* instruction, Location out, Location root);

  void IncreaseFrame(size_t adjustment) override;
  void DecreaseFrame(size_t adjustment) override;

  void GenerateNop() override;

  void GenerateImplicitNullCheck(HNullCheck* instruction) override;
  void GenerateExplicitNullCheck(HNullCheck* instruction) override;

  JumpTableARMVIXL* CreateJumpTable(HPackedSwitch* switch_instr) {
    jump_tables_.emplace_back(new (GetGraph()->GetAllocator()) JumpTableARMVIXL(switch_instr));
    return jump_tables_.back().get();
  }
  void EmitJumpTables();

  void EmitMovwMovtPlaceholder(CodeGeneratorARMVIXL::PcRelativePatchInfo* labels,
                               vixl::aarch32::Register out);

  // `temp` is an extra temporary register that is used for some conditions;
  // callers may not specify it, in which case the method will use a scratch
  // register instead.
  void GenerateConditionWithZero(IfCondition condition,
                                 vixl::aarch32::Register out,
                                 vixl::aarch32::Register in,
                                 vixl::aarch32::Register temp = vixl32::Register());

  void MaybeRecordImplicitNullCheck(HInstruction* instr) final {
    // The function must only be called within special scopes
    // (EmissionCheckScope, ExactAssemblyScope) which prevent generation of
    // veneer/literal pools by the VIXL assembler.
    CHECK_EQ(GetVIXLAssembler()->ArePoolsBlocked(), true)
        << "The function must only be called within EmissionCheckScope or ExactAssemblyScope";
    CodeGenerator::MaybeRecordImplicitNullCheck(instr);
  }
| 808 | |
Nicolas Geoffray | e2a3aa9 | 2019-11-25 17:52:58 +0000 | [diff] [blame] | 809 | void MaybeGenerateInlineCacheCheck(HInstruction* instruction, vixl32::Register klass); |
Nicolas Geoffray | a59af8a | 2019-11-27 17:42:32 +0000 | [diff] [blame] | 810 | void MaybeIncrementHotness(bool is_frame_entry); |
Nicolas Geoffray | e2a3aa9 | 2019-11-25 17:52:58 +0000 | [diff] [blame] | 811 | |
Scott Wakeling | fe88546 | 2016-09-22 10:24:38 +0100 | [diff] [blame] | 812 | private: |
Vladimir Marko | ca1e038 | 2018-04-11 09:58:41 +0000 | [diff] [blame] | 813 | // Encoding of the thunk type and data for link-time generated Baker read barrier thunks.
| 814 | |
| 815 | enum class BakerReadBarrierKind : uint8_t { |
Vladimir Marko | 3d350a8 | 2020-11-18 14:14:27 +0000 | [diff] [blame] | 816 | kField, // Field get or array get with constant offset (i.e. constant index). |
| 817 | kArray, // Array get with index in register. |
| 818 | kGcRoot, // GC root load. |
| 819 | kIntrinsicCas, // Unsafe/VarHandle CAS intrinsic. |
| 820 | kLast = kIntrinsicCas |
Vladimir Marko | ca1e038 | 2018-04-11 09:58:41 +0000 | [diff] [blame] | 821 | }; |
| 822 | |
| 823 | enum class BakerReadBarrierWidth : uint8_t { |
| 824 | kWide, // 32-bit LDR (and 32-bit NEG if heap poisoning is enabled). |
| 825 | kNarrow, // 16-bit LDR (and 16-bit NEG if heap poisoning is enabled). |
| 826 | kLast = kNarrow |
| 827 | }; |
| 828 | |
| 829 | static constexpr uint32_t kBakerReadBarrierInvalidEncodedReg = /* pc is invalid */ 15u; |
| 830 | |
| 831 | static constexpr size_t kBitsForBakerReadBarrierKind = |
| 832 | MinimumBitsToStore(static_cast<size_t>(BakerReadBarrierKind::kLast)); |
| 833 | static constexpr size_t kBakerReadBarrierBitsForRegister = |
| 834 | MinimumBitsToStore(kBakerReadBarrierInvalidEncodedReg); |
| 835 | using BakerReadBarrierKindField = |
| 836 | BitField<BakerReadBarrierKind, 0, kBitsForBakerReadBarrierKind>; |
| 837 | using BakerReadBarrierFirstRegField = |
| 838 | BitField<uint32_t, kBitsForBakerReadBarrierKind, kBakerReadBarrierBitsForRegister>; |
| 839 | using BakerReadBarrierSecondRegField = |
| 840 | BitField<uint32_t, |
| 841 | kBitsForBakerReadBarrierKind + kBakerReadBarrierBitsForRegister, |
| 842 | kBakerReadBarrierBitsForRegister>; |
| 843 | static constexpr size_t kBitsForBakerReadBarrierWidth = |
| 844 | MinimumBitsToStore(static_cast<size_t>(BakerReadBarrierWidth::kLast)); |
| 845 | using BakerReadBarrierWidthField = |
| 846 | BitField<BakerReadBarrierWidth, |
| 847 | kBitsForBakerReadBarrierKind + 2 * kBakerReadBarrierBitsForRegister, |
| 848 | kBitsForBakerReadBarrierWidth>; |
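  // With the current enumerators the custom data packs into the low 11 bits
  // (a derived illustration, not an additional invariant):
  //
  //   bit  10   : BakerReadBarrierWidth (0 = kWide, 1 = kNarrow)
  //   bits 9..6 : second register, or kBakerReadBarrierInvalidEncodedReg
  //   bits 5..2 : first register
  //   bits 1..0 : BakerReadBarrierKind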
| 849 | |
| 850 | static void CheckValidReg(uint32_t reg) { |
Vladimir Marko | dcd117e | 2018-04-19 11:54:00 +0100 | [diff] [blame] | 851 | DCHECK(reg < vixl::aarch32::ip.GetCode() && reg != mr.GetCode()) << reg; |
Vladimir Marko | ca1e038 | 2018-04-11 09:58:41 +0000 | [diff] [blame] | 852 | } |
| 853 | |
| 854 | static uint32_t EncodeBakerReadBarrierFieldData(uint32_t base_reg, |
| 855 | uint32_t holder_reg, |
| 856 | bool narrow) { |
| 857 | CheckValidReg(base_reg); |
| 858 | CheckValidReg(holder_reg); |
Santiago Aboy Solanes | 872ec72 | 2022-02-18 14:10:25 +0000 | [diff] [blame] | 859 | DCHECK_IMPLIES(narrow, base_reg < 8u) << base_reg; |
Vladimir Marko | ca1e038 | 2018-04-11 09:58:41 +0000 | [diff] [blame] | 860 | BakerReadBarrierWidth width = |
| 861 | narrow ? BakerReadBarrierWidth::kNarrow : BakerReadBarrierWidth::kWide; |
| 862 | return BakerReadBarrierKindField::Encode(BakerReadBarrierKind::kField) | |
| 863 | BakerReadBarrierFirstRegField::Encode(base_reg) | |
| 864 | BakerReadBarrierSecondRegField::Encode(holder_reg) | |
| 865 | BakerReadBarrierWidthField::Encode(width); |
| 866 | } |
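  // Worked example (derived from the encoding above): a narrow field load with
  // base_reg = 0 (r0) and holder_reg = 1 (r1) yields
  //   kField (0) | (0u << 2) | (1u << 6) | (kNarrow (1u) << 10) = 0x440.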
| 867 | |
| 868 | static uint32_t EncodeBakerReadBarrierArrayData(uint32_t base_reg) { |
| 869 | CheckValidReg(base_reg); |
| 870 | return BakerReadBarrierKindField::Encode(BakerReadBarrierKind::kArray) | |
| 871 | BakerReadBarrierFirstRegField::Encode(base_reg) | |
| 872 | BakerReadBarrierSecondRegField::Encode(kBakerReadBarrierInvalidEncodedReg) | |
| 873 | BakerReadBarrierWidthField::Encode(BakerReadBarrierWidth::kWide); |
| 874 | } |
| 875 | |
| 876 | static uint32_t EncodeBakerReadBarrierGcRootData(uint32_t root_reg, bool narrow) { |
| 877 | CheckValidReg(root_reg); |
Santiago Aboy Solanes | 872ec72 | 2022-02-18 14:10:25 +0000 | [diff] [blame] | 878 | DCHECK_IMPLIES(narrow, root_reg < 8u) << root_reg; |
Vladimir Marko | ca1e038 | 2018-04-11 09:58:41 +0000 | [diff] [blame] | 879 | BakerReadBarrierWidth width = |
| 880 | narrow ? BakerReadBarrierWidth::kNarrow : BakerReadBarrierWidth::kWide; |
| 881 | return BakerReadBarrierKindField::Encode(BakerReadBarrierKind::kGcRoot) | |
| 882 | BakerReadBarrierFirstRegField::Encode(root_reg) | |
| 883 | BakerReadBarrierSecondRegField::Encode(kBakerReadBarrierInvalidEncodedReg) | |
| 884 | BakerReadBarrierWidthField::Encode(width); |
| 885 | } |
| 886 | |
Vladimir Marko | 3d350a8 | 2020-11-18 14:14:27 +0000 | [diff] [blame] | 887 | static uint32_t EncodeBakerReadBarrierIntrinsicCasData(uint32_t root_reg) { |
Vladimir Marko | d887ed8 | 2018-08-14 13:52:12 +0000 | [diff] [blame] | 888 | CheckValidReg(root_reg); |
Vladimir Marko | 3d350a8 | 2020-11-18 14:14:27 +0000 | [diff] [blame] | 889 | return BakerReadBarrierKindField::Encode(BakerReadBarrierKind::kIntrinsicCas) | |
Vladimir Marko | d887ed8 | 2018-08-14 13:52:12 +0000 | [diff] [blame] | 890 | BakerReadBarrierFirstRegField::Encode(root_reg) | |
| 891 | BakerReadBarrierSecondRegField::Encode(kBakerReadBarrierInvalidEncodedReg) | |
| 892 | BakerReadBarrierWidthField::Encode(BakerReadBarrierWidth::kWide); |
| 893 | } |
| 894 | |
Vladimir Marko | ca1e038 | 2018-04-11 09:58:41 +0000 | [diff] [blame] | 895 | void CompileBakerReadBarrierThunk(ArmVIXLAssembler& assembler, |
| 896 | uint32_t encoded_data, |
| 897 | /*out*/ std::string* debug_name); |
| 898 | |
Artem Serov | c5fcb44 | 2016-12-02 19:19:58 +0000 | [diff] [blame] | 899 | using Uint32ToLiteralMap = ArenaSafeMap<uint32_t, VIXLUInt32Literal*>; |
Artem Serov | c5fcb44 | 2016-12-02 19:19:58 +0000 | [diff] [blame] | 900 | using StringToLiteralMap = ArenaSafeMap<StringReference, |
| 901 | VIXLUInt32Literal*, |
| 902 | StringReferenceValueComparator>; |
| 903 | using TypeToLiteralMap = ArenaSafeMap<TypeReference, |
| 904 | VIXLUInt32Literal*, |
| 905 | TypeReferenceValueComparator>; |
Artem Serov | d4cc5b2 | 2016-11-04 11:19:09 +0000 | [diff] [blame] | 906 | |
Vladimir Marko | eee1c0e | 2017-04-21 17:58:41 +0100 | [diff] [blame] | 907 | struct BakerReadBarrierPatchInfo { |
| 908 | explicit BakerReadBarrierPatchInfo(uint32_t data) : label(), custom_data(data) { } |
| 909 | |
| 910 | vixl::aarch32::Label label; |
| 911 | uint32_t custom_data; |
| 912 | }; |
| 913 | |
Artem Serov | c5fcb44 | 2016-12-02 19:19:58 +0000 | [diff] [blame] | 914 | VIXLUInt32Literal* DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map); |
Vladimir Marko | 59eb30f | 2018-02-20 11:52:34 +0000 | [diff] [blame] | 915 | PcRelativePatchInfo* NewPcRelativePatch(const DexFile* dex_file, |
Artem Serov | d4cc5b2 | 2016-11-04 11:19:09 +0000 | [diff] [blame] | 916 | uint32_t offset_or_index, |
| 917 | ArenaDeque<PcRelativePatchInfo>* patches); |
Vladimir Marko | d8dbc8d | 2017-09-20 13:37:47 +0100 | [diff] [blame] | 918 | template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)> |
Artem Serov | d4cc5b2 | 2016-11-04 11:19:09 +0000 | [diff] [blame] | 919 | static void EmitPcRelativeLinkerPatches(const ArenaDeque<PcRelativePatchInfo>& infos, |
Vladimir Marko | d8dbc8d | 2017-09-20 13:37:47 +0100 | [diff] [blame] | 920 | ArenaVector<linker::LinkerPatch>* linker_patches); |
Artem Serov | d4cc5b2 | 2016-11-04 11:19:09 +0000 | [diff] [blame] | 921 | |
Scott Wakeling | fe88546 | 2016-09-22 10:24:38 +0100 | [diff] [blame] | 922 | // Labels for each block that will be compiled. |
| 923 | // We use a deque so that the `vixl::aarch32::Label` objects do not move in memory. |
| 924 | ArenaDeque<vixl::aarch32::Label> block_labels_; // Indexed by block id. |
| 925 | vixl::aarch32::Label frame_entry_label_; |
| 926 | |
Artem Serov | 551b28f | 2016-10-18 19:11:30 +0100 | [diff] [blame] | 927 | ArenaVector<std::unique_ptr<JumpTableARMVIXL>> jump_tables_; |
Scott Wakeling | fe88546 | 2016-09-22 10:24:38 +0100 | [diff] [blame] | 928 | LocationsBuilderARMVIXL location_builder_; |
| 929 | InstructionCodeGeneratorARMVIXL instruction_visitor_; |
| 930 | ParallelMoveResolverARMVIXL move_resolver_; |
| 931 | |
| 932 | ArmVIXLAssembler assembler_; |
Scott Wakeling | fe88546 | 2016-09-22 10:24:38 +0100 | [diff] [blame] | 933 | |
Vladimir Marko | 2d06e02 | 2019-07-08 15:45:19 +0100 | [diff] [blame] | 934 | // PC-relative method patch info for kBootImageLinkTimePcRelative. |
Vladimir Marko | 59eb30f | 2018-02-20 11:52:34 +0000 | [diff] [blame] | 935 | ArenaDeque<PcRelativePatchInfo> boot_image_method_patches_; |
Vladimir Marko | 0eb882b | 2017-05-15 13:39:18 +0100 | [diff] [blame] | 936 | // PC-relative method patch info for kBssEntry. |
| 937 | ArenaDeque<PcRelativePatchInfo> method_bss_entry_patches_; |
Vladimir Marko | 1998cd0 | 2017-01-13 13:02:58 +0000 | [diff] [blame] | 938 | // PC-relative type patch info for kBootImageLinkTimePcRelative. |
Vladimir Marko | 59eb30f | 2018-02-20 11:52:34 +0000 | [diff] [blame] | 939 | ArenaDeque<PcRelativePatchInfo> boot_image_type_patches_; |
Vladimir Marko | 1998cd0 | 2017-01-13 13:02:58 +0000 | [diff] [blame] | 940 | // PC-relative type patch info for kBssEntry. |
| 941 | ArenaDeque<PcRelativePatchInfo> type_bss_entry_patches_; |
Vladimir Marko | 8f63f10 | 2020-09-28 12:10:28 +0100 | [diff] [blame] | 942 | // PC-relative public type patch info for kBssEntryPublic. |
| 943 | ArenaDeque<PcRelativePatchInfo> public_type_bss_entry_patches_; |
| 944 | // PC-relative package type patch info for kBssEntryPackage. |
| 945 | ArenaDeque<PcRelativePatchInfo> package_type_bss_entry_patches_; |
Vladimir Marko | e47f60c | 2018-02-21 13:43:28 +0000 | [diff] [blame] | 946 | // PC-relative String patch info for kBootImageLinkTimePcRelative. |
Vladimir Marko | 59eb30f | 2018-02-20 11:52:34 +0000 | [diff] [blame] | 947 | ArenaDeque<PcRelativePatchInfo> boot_image_string_patches_; |
Vladimir Marko | 6cfbdbc | 2017-07-25 13:26:39 +0100 | [diff] [blame] | 948 | // PC-relative String patch info for kBssEntry. |
| 949 | ArenaDeque<PcRelativePatchInfo> string_bss_entry_patches_; |
Vladimir Marko | 2d06e02 | 2019-07-08 15:45:19 +0100 | [diff] [blame] | 950 | // PC-relative patch info for IntrinsicObjects in the boot image,
| 951 | // and for method/type/string patches for kBootImageRelRo otherwise.
| 952 | ArenaDeque<PcRelativePatchInfo> boot_image_other_patches_; |
Vladimir Marko | f667508 | 2019-05-17 12:05:28 +0100 | [diff] [blame] | 953 | // Patch info for calls to entrypoint dispatch thunks. Used for slow paths. |
| 954 | ArenaDeque<PatchInfo<vixl::aarch32::Label>> call_entrypoint_patches_; |
Vladimir Marko | eee1c0e | 2017-04-21 17:58:41 +0100 | [diff] [blame] | 955 | // Baker read barrier patch info. |
| 956 | ArenaDeque<BakerReadBarrierPatchInfo> baker_read_barrier_patches_; |
Artem Serov | c5fcb44 | 2016-12-02 19:19:58 +0000 | [diff] [blame] | 957 | |
Vladimir Marko | f667508 | 2019-05-17 12:05:28 +0100 | [diff] [blame] | 958 | // Deduplication map for 32-bit literals, used by JIT compilation for boot image addresses.
| 959 | Uint32ToLiteralMap uint32_literals_; |
Artem Serov | c5fcb44 | 2016-12-02 19:19:58 +0000 | [diff] [blame] | 960 | // Patches for string literals in JIT compiled code. |
| 961 | StringToLiteralMap jit_string_patches_; |
| 962 | // Patches for class literals in JIT compiled code. |
| 963 | TypeToLiteralMap jit_class_patches_; |
Artem Serov | d4cc5b2 | 2016-11-04 11:19:09 +0000 | [diff] [blame] | 964 | |
Vladimir Marko | 966b46f | 2018-08-03 10:20:19 +0000 | [diff] [blame] | 965 | // Baker read barrier slow paths, mapping custom data (uint32_t) to label.
| 966 | // Wrap the label to work around vixl::aarch32::Label being non-copyable
| 967 | // and non-movable, which makes it unusable in ArenaSafeMap<>.
| 968 | struct LabelWrapper { |
| 969 | LabelWrapper(const LabelWrapper& src) |
| 970 | : label() { |
| 971 | DCHECK(!src.label.IsReferenced() && !src.label.IsBound()); |
| 972 | } |
| 973 | LabelWrapper() = default; |
| 974 | vixl::aarch32::Label label; |
| 975 | }; |
| 976 | ArenaSafeMap<uint32_t, LabelWrapper> jit_baker_read_barrier_slow_paths_; |
| 977 | |
Vladimir Marko | ca1e038 | 2018-04-11 09:58:41 +0000 | [diff] [blame] | 978 | friend class linker::Thumb2RelativePatcherTest; |
Scott Wakeling | fe88546 | 2016-09-22 10:24:38 +0100 | [diff] [blame] | 979 | DISALLOW_COPY_AND_ASSIGN(CodeGeneratorARMVIXL); |
| 980 | }; |
| 981 | |
Scott Wakeling | fe88546 | 2016-09-22 10:24:38 +0100 | [diff] [blame] | 982 | } // namespace arm |
| 983 | } // namespace art |
| 984 | |
| 985 | #endif // ART_COMPILER_OPTIMIZING_CODE_GENERATOR_ARM_VIXL_H_ |