/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_CODE_GENERATOR_ARM_VIXL_H_
#define ART_COMPILER_OPTIMIZING_CODE_GENERATOR_ARM_VIXL_H_

#include "base/enums.h"
#include "code_generator.h"
#include "common_arm.h"
#include "driver/compiler_options.h"
#include "nodes.h"
#include "parallel_move_resolver.h"
#include "string_reference.h"
#include "utils/arm/assembler_arm_vixl.h"
#include "utils/type_reference.h"

// TODO(VIXL): Make VIXL compile cleanly with respect to -Wshadow.
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wshadow"
#include "aarch32/constants-aarch32.h"
#include "aarch32/instructions-aarch32.h"
#include "aarch32/macro-assembler-aarch32.h"
#pragma GCC diagnostic pop

// True if VIXL32 should be used for codegen on ARM.
#ifdef ART_USE_VIXL_ARM_BACKEND
static constexpr bool kArmUseVIXL32 = true;
#else
static constexpr bool kArmUseVIXL32 = false;
#endif

namespace art {
namespace arm {

static const vixl::aarch32::Register kParameterCoreRegistersVIXL[] = {
    vixl::aarch32::r1,
    vixl::aarch32::r2,
    vixl::aarch32::r3
};
static const size_t kParameterCoreRegistersLengthVIXL = arraysize(kParameterCoreRegistersVIXL);
static const vixl::aarch32::SRegister kParameterFpuRegistersVIXL[] = {
    vixl::aarch32::s0,
    vixl::aarch32::s1,
    vixl::aarch32::s2,
    vixl::aarch32::s3,
    vixl::aarch32::s4,
    vixl::aarch32::s5,
    vixl::aarch32::s6,
    vixl::aarch32::s7,
    vixl::aarch32::s8,
    vixl::aarch32::s9,
    vixl::aarch32::s10,
    vixl::aarch32::s11,
    vixl::aarch32::s12,
    vixl::aarch32::s13,
    vixl::aarch32::s14,
    vixl::aarch32::s15
};
static const size_t kParameterFpuRegistersLengthVIXL = arraysize(kParameterFpuRegistersVIXL);

static const vixl::aarch32::Register kMethodRegister = vixl::aarch32::r0;

static const vixl::aarch32::Register kCoreAlwaysSpillRegister = vixl::aarch32::r5;

// Callee-saved core registers: r5, r6, r7, r8, r10, r11 and lr.
static const vixl::aarch32::RegisterList kCoreCalleeSaves = vixl::aarch32::RegisterList::Union(
    vixl::aarch32::RegisterList(vixl::aarch32::r5,
                                vixl::aarch32::r6,
                                vixl::aarch32::r7,
                                vixl::aarch32::r8),
    vixl::aarch32::RegisterList(vixl::aarch32::r10,
                                vixl::aarch32::r11,
                                vixl::aarch32::lr));

// Callee-saved FP registers: s16 to s31, inclusive.
static const vixl::aarch32::SRegisterList kFpuCalleeSaves =
    vixl::aarch32::SRegisterList(vixl::aarch32::s16, 16);

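// For reference: with lr == r14, the core list above corresponds to the spill
// mask (1 << 5) | (1 << 6) | (1 << 7) | (1 << 8) | (1 << 10) | (1 << 11) | (1 << 14),
// i.e. 0x4de0, and the FP list covers the double registers d8 to d15.
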
static const vixl::aarch32::Register kRuntimeParameterCoreRegistersVIXL[] = {
    vixl::aarch32::r0,
    vixl::aarch32::r1,
    vixl::aarch32::r2,
    vixl::aarch32::r3
};
static const size_t kRuntimeParameterCoreRegistersLengthVIXL =
    arraysize(kRuntimeParameterCoreRegistersVIXL);
static const vixl::aarch32::SRegister kRuntimeParameterFpuRegistersVIXL[] = {
    vixl::aarch32::s0,
    vixl::aarch32::s1,
    vixl::aarch32::s2,
    vixl::aarch32::s3
};
static const size_t kRuntimeParameterFpuRegistersLengthVIXL =
    arraysize(kRuntimeParameterFpuRegistersVIXL);

class LoadClassSlowPathARMVIXL;

class CodeGeneratorARMVIXL;

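// Jump table for an HPackedSwitch: holds one manually placed 32-bit literal per
// switch entry. The intended flow, as suggested by the interface below, is that
// EmitTable() emits the literal pool at `table_start_` and FixTable() later
// rewrites the entries once the final target addresses are known.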
class JumpTableARMVIXL : public DeletableArenaObject<kArenaAllocSwitchTable> {
 public:
  typedef vixl::aarch32::Literal<int32_t> IntLiteral;

  explicit JumpTableARMVIXL(HPackedSwitch* switch_instr)
      : switch_instr_(switch_instr),
        table_start_(),
        bb_addresses_(switch_instr->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
    uint32_t num_entries = switch_instr_->GetNumEntries();
    for (uint32_t i = 0; i < num_entries; i++) {
      IntLiteral* lit = new IntLiteral(0, vixl32::RawLiteral::kManuallyPlaced);
      bb_addresses_.emplace_back(lit);
    }
  }

  vixl::aarch32::Label* GetTableStartLabel() { return &table_start_; }

  void EmitTable(CodeGeneratorARMVIXL* codegen);
  void FixTable(CodeGeneratorARMVIXL* codegen);

 private:
  HPackedSwitch* const switch_instr_;
  vixl::aarch32::Label table_start_;
  ArenaVector<std::unique_ptr<IntLiteral>> bb_addresses_;

  DISALLOW_COPY_AND_ASSIGN(JumpTableARMVIXL);
};

class InvokeRuntimeCallingConventionARMVIXL
    : public CallingConvention<vixl::aarch32::Register, vixl::aarch32::SRegister> {
 public:
  InvokeRuntimeCallingConventionARMVIXL()
      : CallingConvention(kRuntimeParameterCoreRegistersVIXL,
                          kRuntimeParameterCoreRegistersLengthVIXL,
                          kRuntimeParameterFpuRegistersVIXL,
                          kRuntimeParameterFpuRegistersLengthVIXL,
                          kArmPointerSize) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConventionARMVIXL);
};

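// Calling convention used for calls into managed (dex) code. Per the parameter
// arrays above, core arguments go in r1-r3 and FP arguments in s0-s15; r0 is
// reserved for the ArtMethod* (kMethodRegister). Arguments that do not fit in
// registers are assumed to be passed on the stack via the shared
// CallingConvention machinery.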
class InvokeDexCallingConventionARMVIXL
    : public CallingConvention<vixl::aarch32::Register, vixl::aarch32::SRegister> {
 public:
  InvokeDexCallingConventionARMVIXL()
      : CallingConvention(kParameterCoreRegistersVIXL,
                          kParameterCoreRegistersLengthVIXL,
                          kParameterFpuRegistersVIXL,
                          kParameterFpuRegistersLengthVIXL,
                          kArmPointerSize) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeDexCallingConventionARMVIXL);
};

class InvokeDexCallingConventionVisitorARMVIXL : public InvokeDexCallingConventionVisitor {
 public:
  InvokeDexCallingConventionVisitorARMVIXL() {}
  virtual ~InvokeDexCallingConventionVisitorARMVIXL() {}

  Location GetNextLocation(Primitive::Type type) OVERRIDE;
  Location GetReturnLocation(Primitive::Type type) const OVERRIDE;
  Location GetMethodLocation() const OVERRIDE;

 private:
  InvokeDexCallingConventionARMVIXL calling_convention;
  uint32_t double_index_ = 0;

  DISALLOW_COPY_AND_ASSIGN(InvokeDexCallingConventionVisitorARMVIXL);
};

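// Register assignment used by the field access stubs, as encoded by the
// overrides below: r0 holds the field index and r1 the object; values are
// returned in r0 (r0/r1 for 64-bit types); the value to set goes in r2 for
// instance fields and r1 for static fields (r2/r3 for 64-bit types).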
class FieldAccessCallingConventionARMVIXL : public FieldAccessCallingConvention {
 public:
  FieldAccessCallingConventionARMVIXL() {}

  Location GetObjectLocation() const OVERRIDE {
    return helpers::LocationFrom(vixl::aarch32::r1);
  }
  Location GetFieldIndexLocation() const OVERRIDE {
    return helpers::LocationFrom(vixl::aarch32::r0);
  }
  Location GetReturnLocation(Primitive::Type type) const OVERRIDE {
    return Primitive::Is64BitType(type)
        ? helpers::LocationFrom(vixl::aarch32::r0, vixl::aarch32::r1)
        : helpers::LocationFrom(vixl::aarch32::r0);
  }
  Location GetSetValueLocation(Primitive::Type type, bool is_instance) const OVERRIDE {
    return Primitive::Is64BitType(type)
        ? helpers::LocationFrom(vixl::aarch32::r2, vixl::aarch32::r3)
        : (is_instance
            ? helpers::LocationFrom(vixl::aarch32::r2)
            : helpers::LocationFrom(vixl::aarch32::r1));
  }
  Location GetFpuLocation(Primitive::Type type) const OVERRIDE {
    return Primitive::Is64BitType(type)
        ? helpers::LocationFrom(vixl::aarch32::s0, vixl::aarch32::s1)
        : helpers::LocationFrom(vixl::aarch32::s0);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(FieldAccessCallingConventionARMVIXL);
};

class SlowPathCodeARMVIXL : public SlowPathCode {
 public:
  explicit SlowPathCodeARMVIXL(HInstruction* instruction)
      : SlowPathCode(instruction), entry_label_(), exit_label_() {}

  vixl::aarch32::Label* GetEntryLabel() { return &entry_label_; }
  vixl::aarch32::Label* GetExitLabel() { return &exit_label_; }

  void SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) OVERRIDE;
  void RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) OVERRIDE;

 private:
  vixl::aarch32::Label entry_label_;
  vixl::aarch32::Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARMVIXL);
};

class ParallelMoveResolverARMVIXL : public ParallelMoveResolverWithSwap {
 public:
  ParallelMoveResolverARMVIXL(ArenaAllocator* allocator, CodeGeneratorARMVIXL* codegen)
      : ParallelMoveResolverWithSwap(allocator), codegen_(codegen) {}

  void EmitMove(size_t index) OVERRIDE;
  void EmitSwap(size_t index) OVERRIDE;
  void SpillScratch(int reg) OVERRIDE;
  void RestoreScratch(int reg) OVERRIDE;

  ArmVIXLAssembler* GetAssembler() const;

 private:
  void Exchange(vixl32::Register reg, int mem);
  void Exchange(int mem1, int mem2);

  CodeGeneratorARMVIXL* const codegen_;

  DISALLOW_COPY_AND_ASSIGN(ParallelMoveResolverARMVIXL);
};

class LocationsBuilderARMVIXL : public HGraphVisitor {
 public:
  LocationsBuilderARMVIXL(HGraph* graph, CodeGeneratorARMVIXL* codegen)
      : HGraphVisitor(graph), codegen_(codegen) {}

#define DECLARE_VISIT_INSTRUCTION(name, super) \
  void Visit##name(H##name* instr) OVERRIDE;

  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(DECLARE_VISIT_INSTRUCTION)
  FOR_EACH_CONCRETE_INSTRUCTION_ARM(DECLARE_VISIT_INSTRUCTION)
  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

  void VisitInstruction(HInstruction* instruction) OVERRIDE {
    LOG(FATAL) << "Unreachable instruction " << instruction->DebugName()
               << " (id " << instruction->GetId() << ")";
  }

 private:
  void HandleInvoke(HInvoke* invoke);
  void HandleBitwiseOperation(HBinaryOperation* operation, Opcode opcode);
  void HandleCondition(HCondition* condition);
  void HandleIntegerRotate(LocationSummary* locations);
  void HandleLongRotate(LocationSummary* locations);
  void HandleShift(HBinaryOperation* operation);
  void HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info);
  void HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info);

  Location ArithmeticZeroOrFpuRegister(HInstruction* input);
  Location ArmEncodableConstantOrRegister(HInstruction* constant, Opcode opcode);
  bool CanEncodeConstantAsImmediate(HConstant* input_cst, Opcode opcode);
  bool CanEncodeConstantAsImmediate(uint32_t value, Opcode opcode, SetCc set_cc = kCcDontCare);

  CodeGeneratorARMVIXL* const codegen_;
  InvokeDexCallingConventionVisitorARMVIXL parameter_visitor_;

  DISALLOW_COPY_AND_ASSIGN(LocationsBuilderARMVIXL);
};

class InstructionCodeGeneratorARMVIXL : public InstructionCodeGenerator {
 public:
  InstructionCodeGeneratorARMVIXL(HGraph* graph, CodeGeneratorARMVIXL* codegen);

#define DECLARE_VISIT_INSTRUCTION(name, super) \
  void Visit##name(H##name* instr) OVERRIDE;

  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(DECLARE_VISIT_INSTRUCTION)
  FOR_EACH_CONCRETE_INSTRUCTION_ARM(DECLARE_VISIT_INSTRUCTION)
  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

  void VisitInstruction(HInstruction* instruction) OVERRIDE {
    LOG(FATAL) << "Unreachable instruction " << instruction->DebugName()
               << " (id " << instruction->GetId() << ")";
  }

  ArmVIXLAssembler* GetAssembler() const { return assembler_; }
  ArmVIXLMacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); }

 private:
  // Generate code for the given suspend check. If `successor` is not null, it
  // is the block to branch to when the suspend check is not needed, and after
  // the suspend call.
  void GenerateSuspendCheck(HSuspendCheck* instruction, HBasicBlock* successor);
  void GenerateClassInitializationCheck(LoadClassSlowPathARMVIXL* slow_path,
                                        vixl32::Register class_reg);
  void GenerateAndConst(vixl::aarch32::Register out, vixl::aarch32::Register first, uint32_t value);
  void GenerateOrrConst(vixl::aarch32::Register out, vixl::aarch32::Register first, uint32_t value);
  void GenerateEorConst(vixl::aarch32::Register out, vixl::aarch32::Register first, uint32_t value);
  void GenerateAddLongConst(Location out, Location first, uint64_t value);
  void HandleBitwiseOperation(HBinaryOperation* operation);
  void HandleCondition(HCondition* condition);
  void HandleIntegerRotate(HRor* ror);
  void HandleLongRotate(HRor* ror);
  void HandleShift(HBinaryOperation* operation);

  void GenerateWideAtomicStore(vixl::aarch32::Register addr,
                               uint32_t offset,
                               vixl::aarch32::Register value_lo,
                               vixl::aarch32::Register value_hi,
                               vixl::aarch32::Register temp1,
                               vixl::aarch32::Register temp2,
                               HInstruction* instruction);
  void GenerateWideAtomicLoad(vixl::aarch32::Register addr,
                              uint32_t offset,
                              vixl::aarch32::Register out_lo,
                              vixl::aarch32::Register out_hi);

  void HandleFieldSet(HInstruction* instruction,
                      const FieldInfo& field_info,
                      bool value_can_be_null);
  void HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info);

  // Generate a heap reference load using one register `out`:
  //
  //   out <- *(out + offset)
  //
  // while honoring heap poisoning and/or read barriers (if any).
  //
  // Location `maybe_temp` is used when generating a read barrier and
  // shall be a register in that case; it may be an invalid location
  // otherwise.
  void GenerateReferenceLoadOneRegister(HInstruction* instruction,
                                        Location out,
                                        uint32_t offset,
                                        Location maybe_temp,
                                        ReadBarrierOption read_barrier_option);
  // Generate a heap reference load using two different registers
  // `out` and `obj`:
  //
  //   out <- *(obj + offset)
  //
  // while honoring heap poisoning and/or read barriers (if any).
  //
  // Location `maybe_temp` is used when generating a Baker's (fast
  // path) read barrier and shall be a register in that case; it may
  // be an invalid location otherwise.
  void GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
                                         Location out,
                                         Location obj,
                                         uint32_t offset,
                                         Location maybe_temp,
                                         ReadBarrierOption read_barrier_option);
  // Generate a GC root reference load:
  //
  //   root <- *(obj + offset)
  //
  // while honoring read barriers based on `read_barrier_option`.
  void GenerateGcRootFieldLoad(HInstruction* instruction,
                               Location root,
                               vixl::aarch32::Register obj,
                               uint32_t offset,
                               ReadBarrierOption read_barrier_option);
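  // Emit a test of the condition input of `instruction` and branch to
  // `true_target` or `false_target` accordingly. The reading of `far_target`
  // below is inferred from its name and default: when false, the targets are
  // known to be close enough for narrow (16-bit) Thumb branch encodings.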
  void GenerateTestAndBranch(HInstruction* instruction,
                             size_t condition_input_index,
                             vixl::aarch32::Label* true_target,
                             vixl::aarch32::Label* false_target,
                             bool far_target = true);
  void GenerateCompareTestAndBranch(HCondition* condition,
                                    vixl::aarch32::Label* true_target,
                                    vixl::aarch32::Label* false_target);
  void GenerateVcmp(HInstruction* instruction);
  void GenerateFPJumps(HCondition* cond,
                       vixl::aarch32::Label* true_label,
                       vixl::aarch32::Label* false_label);
  void GenerateLongComparesAndJumps(HCondition* cond,
                                    vixl::aarch32::Label* true_label,
                                    vixl::aarch32::Label* false_label);
  void DivRemOneOrMinusOne(HBinaryOperation* instruction);
  void DivRemByPowerOfTwo(HBinaryOperation* instruction);
  void GenerateDivRemWithAnyConstant(HBinaryOperation* instruction);
  void GenerateDivRemConstantIntegral(HBinaryOperation* instruction);
  void HandleGoto(HInstruction* got, HBasicBlock* successor);

  ArmVIXLAssembler* const assembler_;
  CodeGeneratorARMVIXL* const codegen_;

  DISALLOW_COPY_AND_ASSIGN(InstructionCodeGeneratorARMVIXL);
};

class CodeGeneratorARMVIXL : public CodeGenerator {
 public:
  CodeGeneratorARMVIXL(HGraph* graph,
                       const ArmInstructionSetFeatures& isa_features,
                       const CompilerOptions& compiler_options,
                       OptimizingCompilerStats* stats = nullptr);
  virtual ~CodeGeneratorARMVIXL() {}

  void GenerateFrameEntry() OVERRIDE;
  void GenerateFrameExit() OVERRIDE;
  void Bind(HBasicBlock* block) OVERRIDE;
  void MoveConstant(Location destination, int32_t value) OVERRIDE;
  void MoveLocation(Location dst, Location src, Primitive::Type dst_type) OVERRIDE;
  void AddLocationAsTemp(Location location, LocationSummary* locations) OVERRIDE;

  size_t SaveCoreRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;
  size_t RestoreCoreRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;
  size_t SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;
  size_t RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;

  size_t GetWordSize() const OVERRIDE {
    return static_cast<size_t>(kArmPointerSize);
  }

  size_t GetFloatingPointSpillSlotSize() const OVERRIDE { return vixl::aarch32::kRegSizeInBytes; }

  HGraphVisitor* GetLocationBuilder() OVERRIDE { return &location_builder_; }

  HGraphVisitor* GetInstructionVisitor() OVERRIDE { return &instruction_visitor_; }

  ArmVIXLAssembler* GetAssembler() OVERRIDE { return &assembler_; }

  const ArmVIXLAssembler& GetAssembler() const OVERRIDE { return assembler_; }

  ArmVIXLMacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); }

  uintptr_t GetAddressOf(HBasicBlock* block) OVERRIDE {
    vixl::aarch32::Label* block_entry_label = GetLabelOf(block);
    DCHECK(block_entry_label->IsBound());
    return block_entry_label->GetLocation();
  }

  void FixJumpTables();
  void SetupBlockedRegisters() const OVERRIDE;

  void DumpCoreRegister(std::ostream& stream, int reg) const OVERRIDE;
  void DumpFloatingPointRegister(std::ostream& stream, int reg) const OVERRIDE;

  ParallelMoveResolver* GetMoveResolver() OVERRIDE { return &move_resolver_; }
  InstructionSet GetInstructionSet() const OVERRIDE { return InstructionSet::kThumb2; }

  // Helper method to move a 32-bit value between two locations.
  void Move32(Location destination, Location source);

  void LoadFromShiftedRegOffset(Primitive::Type type,
                                Location out_loc,
                                vixl::aarch32::Register base,
                                vixl::aarch32::Register reg_index,
                                vixl::aarch32::Condition cond = vixl::aarch32::al);
  void StoreToShiftedRegOffset(Primitive::Type type,
                               Location out_loc,
                               vixl::aarch32::Register base,
                               vixl::aarch32::Register reg_index,
                               vixl::aarch32::Condition cond = vixl::aarch32::al);

  // Generate code to invoke a runtime entry point.
  void InvokeRuntime(QuickEntrypointEnum entrypoint,
                     HInstruction* instruction,
                     uint32_t dex_pc,
                     SlowPathCode* slow_path = nullptr) OVERRIDE;

  // Generate code to invoke a runtime entry point, but do not record
  // PC-related information in a stack map.
  void InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                           HInstruction* instruction,
                                           SlowPathCode* slow_path);

  // Emit a write barrier.
  void MarkGCCard(vixl::aarch32::Register temp,
                  vixl::aarch32::Register card,
                  vixl::aarch32::Register object,
                  vixl::aarch32::Register value,
                  bool can_be_null);

  void GenerateMemoryBarrier(MemBarrierKind kind);

  vixl::aarch32::Label* GetLabelOf(HBasicBlock* block) {
    block = FirstNonEmptyBlock(block);
    return &(block_labels_[block->GetBlockId()]);
  }

  void Initialize() OVERRIDE {
    block_labels_.resize(GetGraph()->GetBlocks().size());
  }

  void Finalize(CodeAllocator* allocator) OVERRIDE;

  const ArmInstructionSetFeatures& GetInstructionSetFeatures() const { return isa_features_; }

  bool NeedsTwoRegisters(Primitive::Type type) const OVERRIDE {
    return type == Primitive::kPrimDouble || type == Primitive::kPrimLong;
  }

  void ComputeSpillMask() OVERRIDE;

  vixl::aarch32::Label* GetFrameEntryLabel() { return &frame_entry_label_; }

  // Check if the desired_string_load_kind is supported. If it is, return it;
  // otherwise return a fall-back kind that should be used instead.
  HLoadString::LoadKind GetSupportedLoadStringKind(
      HLoadString::LoadKind desired_string_load_kind) OVERRIDE;

  // Check if the desired_class_load_kind is supported. If it is, return it;
  // otherwise return a fall-back kind that should be used instead.
  HLoadClass::LoadKind GetSupportedLoadClassKind(
      HLoadClass::LoadKind desired_class_load_kind) OVERRIDE;

  // Check if the desired_dispatch_info is supported. If it is, return it;
  // otherwise return a fall-back info that should be used instead.
  HInvokeStaticOrDirect::DispatchInfo GetSupportedInvokeStaticOrDirectDispatch(
      const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
      HInvokeStaticOrDirect* invoke) OVERRIDE;

  void GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) OVERRIDE;
  void GenerateVirtualCall(HInvokeVirtual* invoke, Location temp) OVERRIDE;

  void MoveFromReturnRegister(Location trg, Primitive::Type type) OVERRIDE;

  // The PcRelativePatchInfo is used for PC-relative addressing of dex cache arrays
  // and boot image strings/types; the only difference between these uses is the
  // interpretation of the offset_or_index. The PC-relative address is loaded with
  // three instructions: MOVW+MOVT to load the offset into base_reg, and then
  // ADD base_reg, PC. The offset is calculated from the ADD's effective PC, i.e.
  // PC+4 on Thumb2. Though we currently emit these three instructions together,
  // instruction scheduling could split this sequence apart, so we keep separate
  // labels for each of them.
  struct PcRelativePatchInfo {
    PcRelativePatchInfo(const DexFile& dex_file, uint32_t off_or_idx)
        : target_dex_file(dex_file), offset_or_index(off_or_idx) { }
    PcRelativePatchInfo(PcRelativePatchInfo&& other) = default;

    const DexFile& target_dex_file;
    // Either the dex cache array element offset or the string/type index.
    uint32_t offset_or_index;
    vixl::aarch32::Label movw_label;
    vixl::aarch32::Label movt_label;
    vixl::aarch32::Label add_pc_label;
  };

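  // For illustration, the emitted placeholder sequence looks roughly like:
  //
  //   movw_label:   movw base_reg, #<target_offset & 0xffff>
  //   movt_label:   movt base_reg, #<target_offset >> 16>
  //   add_pc_label: add  base_reg, pc
  //
  // with the immediates filled in when the linker patches are applied, and
  // target_offset measured from the ADD's effective PC (PC + 4 on Thumb2).
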
  PcRelativePatchInfo* NewPcRelativeStringPatch(const DexFile& dex_file, uint32_t string_index);
  PcRelativePatchInfo* NewPcRelativeTypePatch(const DexFile& dex_file, dex::TypeIndex type_index);
  PcRelativePatchInfo* NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
                                                       uint32_t element_offset);
  void EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) OVERRIDE;

  // Fast path implementation of ReadBarrier::Barrier for a heap
  // reference field load when Baker's read barriers are used.
  void GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                             Location ref,
                                             vixl::aarch32::Register obj,
                                             uint32_t offset,
                                             Location temp,
                                             bool needs_null_check);
  // Fast path implementation of ReadBarrier::Barrier for a heap
  // reference array load when Baker's read barriers are used.
  void GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                             Location ref,
                                             vixl::aarch32::Register obj,
                                             uint32_t data_offset,
                                             Location index,
                                             Location temp,
                                             bool needs_null_check);
  // Factored implementation, used by GenerateFieldLoadWithBakerReadBarrier,
  // GenerateArrayLoadWithBakerReadBarrier and some intrinsics.
  //
  // Load the object reference located at the address
  // `obj + offset + (index << scale_factor)`, held by object `obj`, into
  // `ref`, and mark it if needed.
  //
  // If `always_update_field` is true, the value of the reference is
  // atomically updated in the holder (`obj`). This operation
  // requires an extra temporary register, which must be provided as a
  // non-null pointer (`temp2`).
  void GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                 Location ref,
                                                 vixl::aarch32::Register obj,
                                                 uint32_t offset,
                                                 Location index,
                                                 ScaleFactor scale_factor,
                                                 Location temp,
                                                 bool needs_null_check,
                                                 bool always_update_field = false,
                                                 vixl::aarch32::Register* temp2 = nullptr);
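  // Typical uses of the factored helper above, as a reading of its parameters
  // rather than an extra contract: a field load passes the field offset with no
  // `index`, while an array load passes the array data offset plus the element
  // index and a scale matching the component size.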

  // Generate a read barrier for a heap reference within `instruction`
  // using a slow path.
  //
  // A read barrier for an object reference read from the heap is
  // implemented as a call to the artReadBarrierSlow runtime entry
  // point, which is passed the values in locations `ref`, `obj`, and
  // `offset`:
  //
  //   mirror::Object* artReadBarrierSlow(mirror::Object* ref,
  //                                      mirror::Object* obj,
  //                                      uint32_t offset);
  //
  // The `out` location contains the value returned by
  // artReadBarrierSlow.
  //
  // When `index` is provided (i.e. for array accesses), the offset
  // value passed to artReadBarrierSlow is adjusted to take `index`
  // into account.
  void GenerateReadBarrierSlow(HInstruction* instruction,
                               Location out,
                               Location ref,
                               Location obj,
                               uint32_t offset,
                               Location index = Location::NoLocation());

  // If read barriers are enabled, generate a read barrier for a heap
  // reference using a slow path. If heap poisoning is enabled, also
  // unpoison the reference in `out`.
  void MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                    Location out,
                                    Location ref,
                                    Location obj,
                                    uint32_t offset,
                                    Location index = Location::NoLocation());

  // Generate a read barrier for a GC root within `instruction` using
  // a slow path.
  //
  // A read barrier for an object reference GC root is implemented as
  // a call to the artReadBarrierForRootSlow runtime entry point,
  // which is passed the value in location `root`:
  //
  //   mirror::Object* artReadBarrierForRootSlow(GcRoot<mirror::Object>* root);
  //
  // The `out` location contains the value returned by
  // artReadBarrierForRootSlow.
  void GenerateReadBarrierForRootSlow(HInstruction* instruction, Location out, Location root);

  void GenerateNop() OVERRIDE;

  void GenerateImplicitNullCheck(HNullCheck* instruction) OVERRIDE;
  void GenerateExplicitNullCheck(HNullCheck* instruction) OVERRIDE;

  JumpTableARMVIXL* CreateJumpTable(HPackedSwitch* switch_instr) {
    jump_tables_.emplace_back(new (GetGraph()->GetArena()) JumpTableARMVIXL(switch_instr));
    return jump_tables_.back().get();
  }
  void EmitJumpTables();
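  // A likely life cycle for the jump tables, inferred from this interface: each
  // HPackedSwitch gets a table via CreateJumpTable(), EmitJumpTables() places
  // the literal tables after the method body, and FixJumpTables() (declared
  // above) rewrites the entries with final addresses once all block labels are
  // bound.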

  void EmitMovwMovtPlaceholder(CodeGeneratorARMVIXL::PcRelativePatchInfo* labels,
                               vixl::aarch32::Register out);

 private:
  vixl::aarch32::Register GetInvokeStaticOrDirectExtraParameter(HInvokeStaticOrDirect* invoke,
                                                                vixl::aarch32::Register temp);

  using Uint32ToLiteralMap = ArenaSafeMap<uint32_t, vixl::aarch32::Literal<uint32_t>*>;
  using MethodToLiteralMap =
      ArenaSafeMap<MethodReference, vixl::aarch32::Literal<uint32_t>*, MethodReferenceComparator>;

  PcRelativePatchInfo* NewPcRelativePatch(const DexFile& dex_file,
                                          uint32_t offset_or_index,
                                          ArenaDeque<PcRelativePatchInfo>* patches);
  template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
  static void EmitPcRelativeLinkerPatches(const ArenaDeque<PcRelativePatchInfo>& infos,
                                          ArenaVector<LinkerPatch>* linker_patches);

  // Labels for each block that will be compiled.
  // We use a deque so that the `vixl::aarch32::Label` objects do not move in memory.
  ArenaDeque<vixl::aarch32::Label> block_labels_;  // Indexed by block id.
  vixl::aarch32::Label frame_entry_label_;

  ArenaVector<std::unique_ptr<JumpTableARMVIXL>> jump_tables_;
  LocationsBuilderARMVIXL location_builder_;
  InstructionCodeGeneratorARMVIXL instruction_visitor_;
  ParallelMoveResolverARMVIXL move_resolver_;

  ArmVIXLAssembler assembler_;
  const ArmInstructionSetFeatures& isa_features_;

  // Relative call patch info.
  // Using ArenaDeque<> which retains element addresses on push/emplace_back().
  ArenaDeque<PatchInfo<vixl::aarch32::Label>> relative_call_patches_;
  // PC-relative patch info for each HArmDexCacheArraysBase.
  ArenaDeque<PcRelativePatchInfo> pc_relative_dex_cache_patches_;
  // PC-relative String patch info; type depends on configuration (app .bss or boot image PIC).
  ArenaDeque<PcRelativePatchInfo> pc_relative_string_patches_;
  // PC-relative type patch info.
  ArenaDeque<PcRelativePatchInfo> pc_relative_type_patches_;

  DISALLOW_COPY_AND_ASSIGN(CodeGeneratorARMVIXL);
};

}  // namespace arm
}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_CODE_GENERATOR_ARM_VIXL_H_