/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm_vixl.h"

#include "arch/arm/instruction_set_features_arm.h"
#include "art_method.h"
#include "code_generator_utils.h"
#include "common_arm.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "thread.h"
#include "utils/arm/assembler_arm_vixl.h"
#include "utils/arm/managed_register_arm.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

namespace art {
namespace arm {

namespace vixl32 = vixl::aarch32;
using namespace vixl32;  // NOLINT(build/namespaces)

using helpers::DRegisterFrom;
using helpers::DWARFReg;
using helpers::FromLowSToD;
using helpers::HighDRegisterFrom;
using helpers::HighRegisterFrom;
using helpers::InputOperandAt;
using helpers::InputRegisterAt;
using helpers::InputSRegisterAt;
using helpers::InputVRegisterAt;
using helpers::LocationFrom;
using helpers::LowRegisterFrom;
using helpers::LowSRegisterFrom;
using helpers::OutputRegister;
using helpers::OutputSRegister;
using helpers::OutputVRegister;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;

using RegisterList = vixl32::RegisterList;

static bool ExpectedPairLayout(Location location) {
  // We expect this for both core and fpu register pairs.
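  // For example, the pairs (R0, R1) and (S4, S5) satisfy this layout, while (R1, R2) does not:
  // the low register must have an even code and the high register must immediately follow it.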
  return ((location.low() & 1) == 0) && (location.low() + 1 == location.high());
}

static constexpr size_t kArmInstrMaxSizeInBytes = 4u;

#ifdef __
#error "ARM Codegen VIXL macro-assembler macro already defined."
#endif

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmPointerSize, x).Int32Value()

// Marker that code is yet to be, and must be, implemented.
#define TODO_VIXL32(level) LOG(level) << __PRETTY_FUNCTION__ << " unimplemented "

// SaveLiveRegisters and RestoreLiveRegisters from SlowPathCodeARM operate on sets of S registers:
// for each live D register, they treat the two corresponding S registers as live.
//
// The two following functions (SaveContiguousSRegisterList, RestoreContiguousSRegisterList) build
// from a list of contiguous S registers a list of contiguous D registers (handling the first/last
// S register corner cases) and save/restore this new list treating them as D registers. This has
// two benefits:
// - decreasing code size;
// - avoiding hazards on Cortex-A57, when a pair of S registers for an actual live D register is
//   restored and then used in regular non-slow-path code as a D register.
//
// For the following example (v means the S register is live):
//   D names: |    D0   |    D1   |    D2   |    D3   | ...
//   S names: | S0 | S1 | S2 | S3 | S4 | S5 | S6 | S7 | ...
//   Live?    |    | v  | v  | v  | v  | v  | v  |    | ...
//
// S1 and S6 will be saved/restored independently; S2-S5 will be processed as the contiguous
// D register list (D1, D2).
//
// TODO(VIXL): All this code should be unnecessary once the VIXL AArch32 backend provides helpers
// for lists of floating-point registers.
static size_t SaveContiguousSRegisterList(size_t first,
                                          size_t last,
                                          CodeGenerator* codegen,
                                          size_t stack_offset) {
  static_assert(kSRegSizeInBytes == kArmWordSize, "Broken assumption on reg/word sizes.");
  static_assert(kDRegSizeInBytes == 2 * kArmWordSize, "Broken assumption on reg/word sizes.");
  DCHECK_LE(first, last);
  if ((first == last) && (first == 0)) {
    __ Vstr(vixl32::SRegister(first), MemOperand(sp, stack_offset));
    return stack_offset + kSRegSizeInBytes;
  }
  if (first % 2 == 1) {
    __ Vstr(vixl32::SRegister(first++), MemOperand(sp, stack_offset));
    stack_offset += kSRegSizeInBytes;
  }

  bool save_last = false;
  if (last % 2 == 0) {
    save_last = true;
    --last;
  }

  if (first < last) {
    vixl32::DRegister d_reg = vixl32::DRegister(first / 2);
    DCHECK_EQ((last - first + 1) % 2, 0u);
    size_t number_of_d_regs = (last - first + 1) / 2;

    if (number_of_d_regs == 1) {
      __ Vstr(d_reg, MemOperand(sp, stack_offset));
    } else if (number_of_d_regs > 1) {
      UseScratchRegisterScope temps(down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler());
      vixl32::Register base = sp;
      if (stack_offset != 0) {
        base = temps.Acquire();
        __ Add(base, sp, stack_offset);
      }
      __ Vstm(F64, base, NO_WRITE_BACK, DRegisterList(d_reg, number_of_d_regs));
    }
    stack_offset += number_of_d_regs * kDRegSizeInBytes;
  }

  if (save_last) {
    __ Vstr(vixl32::SRegister(last + 1), MemOperand(sp, stack_offset));
    stack_offset += kSRegSizeInBytes;
  }

  return stack_offset;
}

static size_t RestoreContiguousSRegisterList(size_t first,
                                             size_t last,
                                             CodeGenerator* codegen,
                                             size_t stack_offset) {
  static_assert(kSRegSizeInBytes == kArmWordSize, "Broken assumption on reg/word sizes.");
  static_assert(kDRegSizeInBytes == 2 * kArmWordSize, "Broken assumption on reg/word sizes.");
  DCHECK_LE(first, last);
  if ((first == last) && (first == 0)) {
    __ Vldr(vixl32::SRegister(first), MemOperand(sp, stack_offset));
    return stack_offset + kSRegSizeInBytes;
  }
  if (first % 2 == 1) {
    __ Vldr(vixl32::SRegister(first++), MemOperand(sp, stack_offset));
    stack_offset += kSRegSizeInBytes;
  }

  bool restore_last = false;
  if (last % 2 == 0) {
    restore_last = true;
    --last;
  }

  if (first < last) {
    vixl32::DRegister d_reg = vixl32::DRegister(first / 2);
    DCHECK_EQ((last - first + 1) % 2, 0u);
    size_t number_of_d_regs = (last - first + 1) / 2;
    if (number_of_d_regs == 1) {
      __ Vldr(d_reg, MemOperand(sp, stack_offset));
    } else if (number_of_d_regs > 1) {
      UseScratchRegisterScope temps(down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler());
      vixl32::Register base = sp;
      if (stack_offset != 0) {
        base = temps.Acquire();
        __ Add(base, sp, stack_offset);
      }
      __ Vldm(F64, base, NO_WRITE_BACK, DRegisterList(d_reg, number_of_d_regs));
    }
    stack_offset += number_of_d_regs * kDRegSizeInBytes;
  }

  if (restore_last) {
    __ Vldr(vixl32::SRegister(last + 1), MemOperand(sp, stack_offset));
    stack_offset += kSRegSizeInBytes;
  }

  return stack_offset;
}

void SlowPathCodeARMVIXL::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  size_t orig_offset = stack_offset;

  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    // If the register holds an object, update the stack mask.
    if (locations->RegisterContainsObject(i)) {
      locations->SetStackBit(stack_offset / kVRegSize);
    }
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_core_stack_offsets_[i] = stack_offset;
    stack_offset += kArmWordSize;
  }

  CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
  arm_codegen->GetAssembler()->StoreRegisterList(core_spills, orig_offset);

  uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  orig_offset = stack_offset;
  for (uint32_t i : LowToHighBits(fp_spills)) {
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_fpu_stack_offsets_[i] = stack_offset;
    stack_offset += kArmWordSize;
  }

  stack_offset = orig_offset;
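  // Save the live S registers one contiguous range of 1s at a time. Adding (1u << begin) to a
  // mask whose lowest set bit is at `begin` carries through the whole contiguous run and clears
  // it, so the AND keeps only the higher ranges and CTZ of the sum locates the end of the run.
  // Illustrative example: fp_spills = 0b0111'1100 gives begin = 2, tmp = 0b1000'0000, end = 7,
  // so S2..S6 are stored as a single range.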
  while (fp_spills != 0u) {
    uint32_t begin = CTZ(fp_spills);
    uint32_t tmp = fp_spills + (1u << begin);
    fp_spills &= tmp;  // Clear the contiguous range of 1s.
    uint32_t end = (tmp == 0u) ? 32u : CTZ(tmp);  // CTZ(0) is undefined.
    stack_offset = SaveContiguousSRegisterList(begin, end - 1, codegen, stack_offset);
  }
  DCHECK_LE(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
}

void SlowPathCodeARMVIXL::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  size_t orig_offset = stack_offset;

  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    stack_offset += kArmWordSize;
  }

  // TODO(VIXL): Check the coherency of stack_offset after this with a test.
  CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
  arm_codegen->GetAssembler()->LoadRegisterList(core_spills, orig_offset);

  uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
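  // Restore the S registers range by range, clearing one contiguous run of 1s per iteration,
  // mirroring the bit trick used in SaveLiveRegisters above.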
  while (fp_spills != 0u) {
    uint32_t begin = CTZ(fp_spills);
    uint32_t tmp = fp_spills + (1u << begin);
    fp_spills &= tmp;  // Clear the contiguous range of 1s.
    uint32_t end = (tmp == 0u) ? 32u : CTZ(tmp);  // CTZ(0) is undefined.
    stack_offset = RestoreContiguousSRegisterList(begin, end - 1, codegen, stack_offset);
  }
  DCHECK_LE(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
}

class NullCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  explicit NullCheckSlowPathARMVIXL(HNullCheck* instruction) : SlowPathCodeARMVIXL(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm_codegen->InvokeRuntime(kQuickThrowNullPointer,
                               instruction_,
                               instruction_->GetDexPc(),
                               this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARMVIXL"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARMVIXL);
};

class DivZeroCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  explicit DivZeroCheckSlowPathARMVIXL(HDivZeroCheck* instruction)
      : SlowPathCodeARMVIXL(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARMVIXL"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARMVIXL);
};

class SuspendCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  SuspendCheckSlowPathARMVIXL(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARMVIXL(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm_codegen->GetLabelOf(successor_));
    }
  }

  vixl32::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARMVIXL"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl32::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARMVIXL);
};

class LoadClassSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  LoadClassSlowPathARMVIXL(HLoadClass* cls, HInstruction* at, uint32_t dex_pc, bool do_clinit)
      : SlowPathCodeARMVIXL(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();

    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConventionARMVIXL calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    arm_codegen->InvokeRuntime(entrypoint, at_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      arm_codegen->Move32(locations->Out(), LocationFrom(r0));
    }
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARMVIXL"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARMVIXL);
};

inline vixl32::Condition ARMCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB:  return lo;
    case kCondBE: return ls;
    case kCondA:  return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

// Maps a signed condition to its unsigned counterpart.
inline vixl32::Condition ARMUnsignedCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    // Signed to unsigned.
    case kCondLT: return lo;
    case kCondLE: return ls;
    case kCondGT: return hi;
    case kCondGE: return hs;
    // Unsigned conditions remain unchanged.
    case kCondB:  return lo;
    case kCondBE: return ls;
    case kCondA:  return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline vixl32::Condition ARMFPCondition(IfCondition cond, bool gt_bias) {
  // The ARM condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in Table A8-1 of the ARMv7 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
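  // After a VCMP + VMRS sequence, an unordered (NaN) result sets the flags to NZCV = 0b0011.
  // The gt_bias flag selects the condition code whose truth value on unordered inputs treats
  // NaN as "greater": for example, kCondLT with gt_bias uses cc (C clear), which is false for
  // unordered operands.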
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}

void CodeGeneratorARMVIXL::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << vixl32::Register(reg);
}

void CodeGeneratorARMVIXL::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << vixl32::SRegister(reg);
}

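// Builds a bit mask with one bit set for each S register in `regs`; for instance, a
// callee-saved list s16-s31 would yield 0xffff0000.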
static uint32_t ComputeSRegisterListMask(const SRegisterList& regs) {
  uint32_t mask = 0;
  for (uint32_t i = regs.GetFirstSRegister().GetCode();
       i <= regs.GetLastSRegister().GetCode();
       ++i) {
    mask |= (1 << i);
  }
  return mask;
}

#undef __

CodeGeneratorARMVIXL::CodeGeneratorARMVIXL(HGraph* graph,
                                           const ArmInstructionSetFeatures& isa_features,
                                           const CompilerOptions& compiler_options,
                                           OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCoreRegisters,
                    kNumberOfSRegisters,
                    kNumberOfRegisterPairs,
                    kCoreCalleeSaves.GetList(),
                    ComputeSRegisterListMask(kFpuCalleeSaves),
                    compiler_options,
                    stats),
      block_labels_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features) {
  // Always save the LR register to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(LR));
  // Give d14 and d15 as scratch registers to VIXL.
  // They are removed from the register allocator in `SetupBlockedRegisters()`.
  // TODO(VIXL): We need two scratch D registers for `EmitSwap` when swapping two double stack
  // slots. If that is sufficiently rare, and we have pressure on FP registers, we could instead
  // spill in `EmitSwap`. But if we actually are guaranteed to have 32 D registers, we could give
  // d30 and d31 to VIXL to avoid removing registers from the allocator. If that is the case, we
  // may also want to investigate giving those 14 other D registers to the allocator.
  GetVIXLAssembler()->GetScratchVRegisterList()->Combine(d14);
  GetVIXLAssembler()->GetScratchVRegisterList()->Combine(d15);
}

#define __ reinterpret_cast<ArmVIXLAssembler*>(GetAssembler())->GetVIXLAssembler()->

void CodeGeneratorARMVIXL::Finalize(CodeAllocator* allocator) {
  GetAssembler()->FinalizeCode();
  CodeGenerator::Finalize(allocator);
}

void CodeGeneratorARMVIXL::SetupBlockedRegisters() const {
  // Stack register, LR and PC are always reserved.
  blocked_core_registers_[SP] = true;
  blocked_core_registers_[LR] = true;
  blocked_core_registers_[PC] = true;

  // Reserve thread register.
  blocked_core_registers_[TR] = true;

  // Reserve temp register.
  blocked_core_registers_[IP] = true;

  // Registers s28-s31 (d14-d15) are left to VIXL for scratch registers.
  // (They are given to the `MacroAssembler` in `CodeGeneratorARMVIXL::CodeGeneratorARMVIXL`.)
  blocked_fpu_registers_[28] = true;
  blocked_fpu_registers_[29] = true;
  blocked_fpu_registers_[30] = true;
  blocked_fpu_registers_[31] = true;

  if (GetGraph()->IsDebuggable()) {
    // Stubs do not save callee-save floating point registers. If the graph
    // is debuggable, we need to deal with these registers differently. For
    // now, just block them.
    for (uint32_t i = kFpuCalleeSaves.GetFirstSRegister().GetCode();
         i <= kFpuCalleeSaves.GetLastSRegister().GetCode();
         ++i) {
      blocked_fpu_registers_[i] = true;
    }
  }
}

InstructionCodeGeneratorARMVIXL::InstructionCodeGeneratorARMVIXL(HGraph* graph,
                                                                 CodeGeneratorARMVIXL* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

void CodeGeneratorARMVIXL::ComputeSpillMask() {
  core_spill_mask_ = allocated_registers_.GetCoreRegisters() & core_callee_save_mask_;
  DCHECK_NE(core_spill_mask_, 0u) << "At least the return address register must be saved";
  // There is no easy instruction to restore just the PC on thumb2. We spill and
  // restore another arbitrary register.
  core_spill_mask_ |= (1 << kCoreAlwaysSpillRegister.GetCode());
  fpu_spill_mask_ = allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_;
  // We use vpush and vpop for saving and restoring floating point registers, which take
  // a SRegister and the number of registers to save/restore after that SRegister. We
  // therefore update the `fpu_spill_mask_` to also contain those registers not allocated,
  // but in the range.
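  // For example (illustrative mask), if only s4 and s7 were allocated, fpu_spill_mask_ would be
  // widened from 0b1001'0000 to 0b1111'0000 so that a single vpush {s4-s7} covers the range.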
  if (fpu_spill_mask_ != 0) {
    uint32_t least_significant_bit = LeastSignificantBit(fpu_spill_mask_);
    uint32_t most_significant_bit = MostSignificantBit(fpu_spill_mask_);
    for (uint32_t i = least_significant_bit + 1; i < most_significant_bit; ++i) {
      fpu_spill_mask_ |= (1 << i);
    }
  }
}

void CodeGeneratorARMVIXL::GenerateFrameEntry() {
  bool skip_overflow_check =
      IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
  __ Bind(&frame_entry_label_);

  if (HasEmptyFrame()) {
    return;
  }

  if (!skip_overflow_check) {
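    // Implicit stack-overflow check: probe the address GetStackOverflowReservedBytes(kArm)
    // below SP with a load. If the stack would overflow, the load faults in the guard page,
    // and the fault handler uses the recorded PC to throw a StackOverflowError.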
    UseScratchRegisterScope temps(GetVIXLAssembler());
    vixl32::Register temp = temps.Acquire();
    __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
    // The load must immediately precede RecordPcInfo.
    AssemblerAccurateScope aas(GetVIXLAssembler(),
                               kArmInstrMaxSizeInBytes,
                               CodeBufferCheckScope::kMaximumSize);
    __ ldr(temp, MemOperand(temp));
    RecordPcInfo(nullptr, 0);
  }

  __ Push(RegisterList(core_spill_mask_));
  GetAssembler()->cfi().AdjustCFAOffset(kArmWordSize * POPCOUNT(core_spill_mask_));
  GetAssembler()->cfi().RelOffsetForMany(DWARFReg(kMethodRegister),
                                         0,
                                         core_spill_mask_,
                                         kArmWordSize);
  if (fpu_spill_mask_ != 0) {
    uint32_t first = LeastSignificantBit(fpu_spill_mask_);

    // Check that list is contiguous.
    DCHECK_EQ(fpu_spill_mask_ >> CTZ(fpu_spill_mask_), ~0u >> (32 - POPCOUNT(fpu_spill_mask_)));

    __ Vpush(SRegisterList(vixl32::SRegister(first), POPCOUNT(fpu_spill_mask_)));
    GetAssembler()->cfi().AdjustCFAOffset(kArmWordSize * POPCOUNT(fpu_spill_mask_));
    GetAssembler()->cfi().RelOffsetForMany(DWARFReg(s0), 0, fpu_spill_mask_, kArmWordSize);
  }
  int adjust = GetFrameSize() - FrameEntrySpillSize();
  __ Sub(sp, sp, adjust);
  GetAssembler()->cfi().AdjustCFAOffset(adjust);
  GetAssembler()->StoreToOffset(kStoreWord, kMethodRegister, sp, 0);
}

void CodeGeneratorARMVIXL::GenerateFrameExit() {
  if (HasEmptyFrame()) {
    __ Bx(lr);
    return;
  }
  GetAssembler()->cfi().RememberState();
  int adjust = GetFrameSize() - FrameEntrySpillSize();
  __ Add(sp, sp, adjust);
  GetAssembler()->cfi().AdjustCFAOffset(-adjust);
  if (fpu_spill_mask_ != 0) {
    uint32_t first = LeastSignificantBit(fpu_spill_mask_);

    // Check that list is contiguous.
    DCHECK_EQ(fpu_spill_mask_ >> CTZ(fpu_spill_mask_), ~0u >> (32 - POPCOUNT(fpu_spill_mask_)));

    __ Vpop(SRegisterList(vixl32::SRegister(first), POPCOUNT(fpu_spill_mask_)));
    GetAssembler()->cfi().AdjustCFAOffset(
        -static_cast<int>(kArmWordSize) * POPCOUNT(fpu_spill_mask_));
    GetAssembler()->cfi().RestoreMany(DWARFReg(vixl32::SRegister(0)), fpu_spill_mask_);
  }
  // Pop LR into PC to return.
  DCHECK_NE(core_spill_mask_ & (1 << kLrCode), 0U);
  uint32_t pop_mask = (core_spill_mask_ & (~(1 << kLrCode))) | 1 << kPcCode;
  __ Pop(RegisterList(pop_mask));
  GetAssembler()->cfi().RestoreState();
  GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
}

void CodeGeneratorARMVIXL::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

void CodeGeneratorARMVIXL::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ Mov(RegisterFrom(destination), RegisterFrom(source));
    } else if (source.IsFpuRegister()) {
      __ Vmov(RegisterFrom(destination), SRegisterFrom(source));
    } else {
      GetAssembler()->LoadFromOffset(kLoadWord,
                                     RegisterFrom(destination),
                                     sp,
                                     source.GetStackIndex());
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      __ Vmov(SRegisterFrom(destination), RegisterFrom(source));
    } else if (source.IsFpuRegister()) {
      __ Vmov(SRegisterFrom(destination), SRegisterFrom(source));
    } else {
      GetAssembler()->LoadSFromOffset(SRegisterFrom(destination), sp, source.GetStackIndex());
    }
  } else {
    DCHECK(destination.IsStackSlot()) << destination;
    if (source.IsRegister()) {
      GetAssembler()->StoreToOffset(kStoreWord,
                                    RegisterFrom(source),
                                    sp,
                                    destination.GetStackIndex());
    } else if (source.IsFpuRegister()) {
      GetAssembler()->StoreSToOffset(SRegisterFrom(source), sp, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot()) << source;
      UseScratchRegisterScope temps(GetVIXLAssembler());
      vixl32::Register temp = temps.Acquire();
      GetAssembler()->LoadFromOffset(kLoadWord, temp, sp, source.GetStackIndex());
      GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
    }
  }
}

void CodeGeneratorARMVIXL::MoveConstant(Location destination ATTRIBUTE_UNUSED,
                                        int32_t value ATTRIBUTE_UNUSED) {
  TODO_VIXL32(FATAL);
}

void CodeGeneratorARMVIXL::MoveLocation(Location dst, Location src, Primitive::Type dst_type) {
  // TODO(VIXL): Maybe refactor to have the 'move' implementation here and use it in
  // `ParallelMoveResolverARMVIXL::EmitMove`, as is done in the `arm64` backend.
  HParallelMove move(GetGraph()->GetArena());
  move.AddMove(src, dst, dst_type, nullptr);
  GetMoveResolver()->EmitNativeCode(&move);
}

void CodeGeneratorARMVIXL::AddLocationAsTemp(Location location ATTRIBUTE_UNUSED,
                                             LocationSummary* locations ATTRIBUTE_UNUSED) {
  TODO_VIXL32(FATAL);
}

void CodeGeneratorARMVIXL::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                         HInstruction* instruction,
                                         uint32_t dex_pc,
                                         SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  GenerateInvokeRuntime(GetThreadOffset<kArmPointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    // TODO(VIXL): If necessary, use a scope to ensure we record the pc info immediately after
    // the previous instruction.
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}

void CodeGeneratorARMVIXL::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                               HInstruction* instruction,
                                                               SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}

void CodeGeneratorARMVIXL::GenerateInvokeRuntime(int32_t entry_point_offset) {
  GetAssembler()->LoadFromOffset(kLoadWord, lr, tr, entry_point_offset);
  __ Blx(lr);
}

void InstructionCodeGeneratorARMVIXL::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  DCHECK(!successor->IsExitBlock());
  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();
  HLoopInformation* info = block->GetLoopInformation();

  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(block, successor)) {
    __ B(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderARMVIXL::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorARMVIXL::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}

void LocationsBuilderARMVIXL::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}

void InstructionCodeGeneratorARMVIXL::VisitTryBoundary(HTryBoundary* try_boundary) {
  HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
  if (!successor->IsExitBlock()) {
    HandleGoto(try_boundary, successor);
  }
}

void LocationsBuilderARMVIXL::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorARMVIXL::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}

void InstructionCodeGeneratorARMVIXL::GenerateVcmp(HInstruction* instruction) {
  Primitive::Type type = instruction->InputAt(0)->GetType();
  Location lhs_loc = instruction->GetLocations()->InAt(0);
  Location rhs_loc = instruction->GetLocations()->InAt(1);
  if (rhs_loc.IsConstant()) {
    // 0.0 is the only immediate that can be encoded directly in
    // a VCMP instruction.
    //
    // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
    // specify that in a floating-point comparison, positive zero
    // and negative zero are considered equal, so we can use the
    // literal 0.0 for both cases here.
    //
    // Note however that some methods (Float.equal, Float.compare,
    // Float.compareTo, Double.equal, Double.compare,
    // Double.compareTo, Math.max, Math.min, StrictMath.max,
    // StrictMath.min) consider 0.0 to be (strictly) greater than
    // -0.0. So if we ever translate calls to these methods into a
    // HCompare instruction, we must handle the -0.0 case with
    // care here.
    DCHECK(rhs_loc.GetConstant()->IsArithmeticZero());
    if (type == Primitive::kPrimFloat) {
      __ Vcmp(F32, InputSRegisterAt(instruction, 0), 0.0);
    } else {
      DCHECK_EQ(type, Primitive::kPrimDouble);
      __ Vcmp(F64, FromLowSToD(LowSRegisterFrom(lhs_loc)), 0.0);
    }
  } else {
    if (type == Primitive::kPrimFloat) {
      __ Vcmp(InputSRegisterAt(instruction, 0), InputSRegisterAt(instruction, 1));
    } else {
      DCHECK_EQ(type, Primitive::kPrimDouble);
      __ Vcmp(FromLowSToD(LowSRegisterFrom(lhs_loc)), FromLowSToD(LowSRegisterFrom(rhs_loc)));
    }
  }
}

void InstructionCodeGeneratorARMVIXL::GenerateFPJumps(HCondition* cond,
                                                      vixl32::Label* true_label,
                                                      vixl32::Label* false_label ATTRIBUTE_UNUSED) {
  // To branch on the result of the FP compare we transfer FPSCR to APSR (encoded as PC in VMRS).
  __ Vmrs(RegisterOrAPSR_nzcv(kPcCode), FPSCR);
  __ B(ARMFPCondition(cond->GetCondition(), cond->IsGtBias()), true_label);
}

void InstructionCodeGeneratorARMVIXL::GenerateLongComparesAndJumps(HCondition* cond,
                                                                   vixl32::Label* true_label,
                                                                   vixl32::Label* false_label) {
  LocationSummary* locations = cond->GetLocations();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);
  IfCondition if_cond = cond->GetCondition();

  vixl32::Register left_high = HighRegisterFrom(left);
  vixl32::Register left_low = LowRegisterFrom(left);
  IfCondition true_high_cond = if_cond;
  IfCondition false_high_cond = cond->GetOppositeCondition();
  vixl32::Condition final_condition = ARMUnsignedCondition(if_cond);  // unsigned on lower part

  // Set the conditions for the test, remembering that == needs to be
  // decided using the low words.
  // TODO: consider avoiding jumps with temporary and CMP low+SBC high
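  // For example, for kCondLE (signed <= on a long): if the high words differ, left_high <
  // right_high (kCondLT) already makes the result true and left_high > right_high (the opposite
  // condition, kCondGT) makes it false; only when the high words are equal are the low words
  // compared, and then as unsigned values (ls).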
  switch (if_cond) {
    case kCondEQ:
    case kCondNE:
      // Nothing to do.
      break;
    case kCondLT:
      false_high_cond = kCondGT;
      break;
    case kCondLE:
      true_high_cond = kCondLT;
      break;
    case kCondGT:
      false_high_cond = kCondLT;
      break;
    case kCondGE:
      true_high_cond = kCondGT;
      break;
    case kCondB:
      false_high_cond = kCondA;
      break;
    case kCondBE:
      true_high_cond = kCondB;
      break;
    case kCondA:
      false_high_cond = kCondB;
      break;
    case kCondAE:
      true_high_cond = kCondA;
      break;
  }
  if (right.IsConstant()) {
    int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
    int32_t val_low = Low32Bits(value);
    int32_t val_high = High32Bits(value);

    __ Cmp(left_high, val_high);
    if (if_cond == kCondNE) {
      __ B(ARMCondition(true_high_cond), true_label);
    } else if (if_cond == kCondEQ) {
      __ B(ARMCondition(false_high_cond), false_label);
    } else {
      __ B(ARMCondition(true_high_cond), true_label);
      __ B(ARMCondition(false_high_cond), false_label);
    }
    // Must be equal high, so compare the lows.
    __ Cmp(left_low, val_low);
  } else {
    vixl32::Register right_high = HighRegisterFrom(right);
    vixl32::Register right_low = LowRegisterFrom(right);

    __ Cmp(left_high, right_high);
    if (if_cond == kCondNE) {
      __ B(ARMCondition(true_high_cond), true_label);
    } else if (if_cond == kCondEQ) {
      __ B(ARMCondition(false_high_cond), false_label);
    } else {
      __ B(ARMCondition(true_high_cond), true_label);
      __ B(ARMCondition(false_high_cond), false_label);
    }
    // Must be equal high, so compare the lows.
    __ Cmp(left_low, right_low);
  }
  // The last comparison might be unsigned.
  // TODO: optimize cases where this is always true/false
  __ B(final_condition, true_label);
}

void InstructionCodeGeneratorARMVIXL::GenerateCompareTestAndBranch(HCondition* condition,
                                                                   vixl32::Label* true_target_in,
                                                                   vixl32::Label* false_target_in) {
  // Generated branching requires both targets to be explicit. If either of the
  // targets is nullptr (fallthrough), use and bind `fallthrough` instead.
  vixl32::Label fallthrough;
  vixl32::Label* true_target = (true_target_in == nullptr) ? &fallthrough : true_target_in;
  vixl32::Label* false_target = (false_target_in == nullptr) ? &fallthrough : false_target_in;

  Primitive::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimLong:
      GenerateLongComparesAndJumps(condition, true_target, false_target);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      GenerateVcmp(condition);
      GenerateFPJumps(condition, true_target, false_target);
      break;
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }

  if (false_target != &fallthrough) {
    __ B(false_target);
  }

  if (true_target_in == nullptr || false_target_in == nullptr) {
    __ Bind(&fallthrough);
  }
}

void InstructionCodeGeneratorARMVIXL::GenerateTestAndBranch(HInstruction* instruction,
                                                            size_t condition_input_index,
                                                            vixl32::Label* true_target,
                                                            vixl32::Label* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ B(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ B(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // Condition has been materialized, compare the output to 0.
    if (kIsDebugBuild) {
      Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
      DCHECK(cond_val.IsRegister());
    }
    if (true_target == nullptr) {
      __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
    } else {
      __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
    }
  } else {
    // Condition has not been materialized. Use its inputs as the comparison and
    // its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    // If this is a long or FP comparison that has been folded into
    // the HCondition, generate the comparison directly.
    Primitive::Type type = condition->InputAt(0)->GetType();
    if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
      GenerateCompareTestAndBranch(condition, true_target, false_target);
      return;
    }

    LocationSummary* locations = cond->GetLocations();
    DCHECK(locations->InAt(0).IsRegister());
    vixl32::Register left = InputRegisterAt(cond, 0);
    Location right = locations->InAt(1);
    if (right.IsRegister()) {
      __ Cmp(left, InputRegisterAt(cond, 1));
    } else {
      DCHECK(right.IsConstant());
      __ Cmp(left, CodeGenerator::GetInt32ValueOf(right.GetConstant()));
    }
    if (true_target == nullptr) {
      __ B(ARMCondition(condition->GetOppositeCondition()), false_target);
    } else {
      __ B(ARMCondition(condition->GetCondition()), true_target);
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ B(false_target);
  }
}

void LocationsBuilderARMVIXL::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARMVIXL::VisitIf(HIf* if_instr) {
  HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
  HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
  vixl32::Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
      nullptr : codegen_->GetLabelOf(true_successor);
  vixl32::Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
      nullptr : codegen_->GetLabelOf(false_successor);
  GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
}

void LocationsBuilderARMVIXL::VisitSelect(HSelect* select) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
  if (Primitive::IsFloatingPointType(select->GetType())) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
    locations->SetInAt(1, Location::RequiresRegister());
  }
  if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
    locations->SetInAt(2, Location::RequiresRegister());
  }
  locations->SetOut(Location::SameAsFirstInput());
}

void InstructionCodeGeneratorARMVIXL::VisitSelect(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  vixl32::Label false_target;
  GenerateTestAndBranch(select,
                        /* condition_input_index */ 2,
                        /* true_target */ nullptr,
                        &false_target);
  codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
  __ Bind(&false_target);
}

void CodeGeneratorARMVIXL::GenerateNop() {
  __ Nop();
}

void LocationsBuilderARMVIXL::HandleCondition(HCondition* cond) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
  // Handle the long/FP comparisons made in instruction simplification.
  switch (cond->InputAt(0)->GetType()) {
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(cond->InputAt(1)));
      if (!cond->IsEmittedAtUseSite()) {
        locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
      }
      break;

    // TODO(VIXL): https://android-review.googlesource.com/#/c/252265/
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      if (!cond->IsEmittedAtUseSite()) {
        locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      }
      break;

    default:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(cond->InputAt(1)));
      if (!cond->IsEmittedAtUseSite()) {
        locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      }
  }
}

void InstructionCodeGeneratorARMVIXL::HandleCondition(HCondition* cond) {
  if (cond->IsEmittedAtUseSite()) {
    return;
  }

  vixl32::Register out = OutputRegister(cond);
  vixl32::Label true_label, false_label;

  switch (cond->InputAt(0)->GetType()) {
    default: {
      // Integer case.
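      // Materialize the boolean without branches: compare, then use an IT (if-then) block in
      // which the two conditional `mov`s write 1 when the condition holds and 0 otherwise. The
      // scope reserves room for three instructions of at most kArmInstrMaxSizeInBytes each.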
      __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
      AssemblerAccurateScope aas(GetVIXLAssembler(),
                                 kArmInstrMaxSizeInBytes * 3u,
                                 CodeBufferCheckScope::kMaximumSize);
      __ ite(ARMCondition(cond->GetCondition()));
      __ mov(ARMCondition(cond->GetCondition()), OutputRegister(cond), 1);
      __ mov(ARMCondition(cond->GetOppositeCondition()), OutputRegister(cond), 0);
      return;
    }
    case Primitive::kPrimLong:
      GenerateLongComparesAndJumps(cond, &true_label, &false_label);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      GenerateVcmp(cond);
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
  }

  // Convert the jumps into the result.
  vixl32::Label done_label;

  // False case: result = 0.
  __ Bind(&false_label);
  __ Mov(out, 0);
  __ B(&done_label);

  // True case: result = 1.
  __ Bind(&true_label);
  __ Mov(out, 1);
  __ Bind(&done_label);
}


void LocationsBuilderARMVIXL::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARMVIXL::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARMVIXL::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARMVIXL::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARMVIXL::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARMVIXL::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARMVIXL::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARMVIXL::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARMVIXL::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARMVIXL::VisitDoubleConstant(HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARMVIXL::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorARMVIXL::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}

void LocationsBuilderARMVIXL::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorARMVIXL::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARMVIXL::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
}

void InstructionCodeGeneratorARMVIXL::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARMVIXL::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  // TODO(VIXL): TryDispatch

  HandleInvoke(invoke);
}

void InstructionCodeGeneratorARMVIXL::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  // TODO(VIXL): TryGenerateIntrinsicCode

  LocationSummary* locations = invoke->GetLocations();
  DCHECK(locations->HasTemps());
  codegen_->GenerateStaticOrDirectCall(invoke, locations->GetTemp(0));
  // TODO(VIXL): If necessary, use a scope to ensure we record the pc info immediately after the
  // previous instruction.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void LocationsBuilderARMVIXL::HandleInvoke(HInvoke* invoke) {
  InvokeDexCallingConventionVisitorARM calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}

void LocationsBuilderARMVIXL::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  // TODO(VIXL): TryDispatch

  HandleInvoke(invoke);
}

void InstructionCodeGeneratorARMVIXL::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  // TODO(VIXL): TryGenerateIntrinsicCode

  codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
  DCHECK(!codegen_->IsLeafMethod());
  // TODO(VIXL): If necessary, use a scope to ensure we record the pc info immediately after the
  // previous instruction.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void LocationsBuilderARMVIXL::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void InstructionCodeGeneratorARMVIXL::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
      __ Rsb(OutputRegister(neg), InputRegisterAt(neg, 0), 0);
      break;

    case Primitive::kPrimLong:
      // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
      __ Rsbs(LowRegisterFrom(out), LowRegisterFrom(in), 0);
      // We cannot emit an RSC (Reverse Subtract with Carry) instruction
      // here, as it does not exist in the Thumb-2 instruction set. We
      // use the following SBC and SUB sequence instead.
      //
      // out.hi = -C
      __ Sbc(HighRegisterFrom(out), HighRegisterFrom(out), HighRegisterFrom(out));
      // out.hi = out.hi - in.hi
      __ Sub(HighRegisterFrom(out), HighRegisterFrom(out), HighRegisterFrom(in));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Vneg(OutputVRegister(neg), InputVRegisterAt(neg, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
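
// For illustration only: a C sketch of what the RSBS/SBC/SUB sequence above
// computes, assuming the long is split into an unsigned low word and a signed
// high word (the variable names here are hypothetical):
//
//   uint32_t out_lo = 0u - in_lo;               // RSBS: C == 1 iff in_lo == 0
//   uint32_t borrow = (in_lo != 0u) ? 1u : 0u;  // borrow == NOT(C)
//   int32_t out_hi = -static_cast<int32_t>(borrow) - in_hi;  // SBC, then SUB
//
// which matches -(in_hi * 2^32 + in_lo) in two's-complement arithmetic.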

void LocationsBuilderARMVIXL::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  DCHECK_NE(result_type, input_type);

  // The float-to-long, double-to-long and long-to-float type conversions
  // rely on a call to the runtime.
  LocationSummary::CallKind call_kind =
      (((input_type == Primitive::kPrimFloat || input_type == Primitive::kPrimDouble)
        && result_type == Primitive::kPrimLong)
       || (input_type == Primitive::kPrimLong && result_type == Primitive::kPrimFloat))
      ? LocationSummary::kCallOnMainOnly
      : LocationSummary::kNoCall;
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);

  // The Java language does not allow treating boolean as an integral type but
  // our bit representation makes it safe.

  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to byte is a result of code transformations.
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to short is a result of code transformations.
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-int' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-int' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        case Primitive::kPrimFloat: {
          // Processing a Dex `float-to-long' instruction.
          InvokeRuntimeCallingConventionARMVIXL calling_convention;
          locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
          locations->SetOut(LocationFrom(r0, r1));
          break;
        }

        case Primitive::kPrimDouble: {
          // Processing a Dex `double-to-long' instruction.
          InvokeRuntimeCallingConventionARMVIXL calling_convention;
          locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0),
                                             calling_convention.GetFpuRegisterAt(1)));
          locations->SetOut(LocationFrom(r0, r1));
          break;
        }

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to char is a result of code transformations.
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // Processing a Dex `int-to-char' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-float' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimLong: {
          // Processing a Dex `long-to-float' instruction.
          InvokeRuntimeCallingConventionARMVIXL calling_convention;
          locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0),
                                             calling_convention.GetRegisterAt(1)));
          locations->SetOut(LocationFrom(calling_convention.GetFpuRegisterAt(0)));
          break;
        }

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-float' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-double' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-double' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-double' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}

void InstructionCodeGeneratorARMVIXL::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  DCHECK_NE(result_type, input_type);
  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to byte is a result of code transformations.
          __ Sbfx(OutputRegister(conversion), LowRegisterFrom(in), 0, 8);
          break;
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          __ Sbfx(OutputRegister(conversion), InputRegisterAt(conversion, 0), 0, 8);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to short is a result of code transformations.
          __ Sbfx(OutputRegister(conversion), LowRegisterFrom(in), 0, 16);
          break;
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
          __ Sbfx(OutputRegister(conversion), InputRegisterAt(conversion, 0), 0, 16);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          DCHECK(out.IsRegister());
          if (in.IsRegisterPair()) {
            __ Mov(OutputRegister(conversion), LowRegisterFrom(in));
          } else if (in.IsDoubleStackSlot()) {
            GetAssembler()->LoadFromOffset(kLoadWord,
                                           OutputRegister(conversion),
                                           sp,
                                           in.GetStackIndex());
          } else {
            DCHECK(in.IsConstant());
            DCHECK(in.GetConstant()->IsLongConstant());
            int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
            __ Mov(OutputRegister(conversion), static_cast<int32_t>(value));
          }
          break;

        case Primitive::kPrimFloat: {
          // Processing a Dex `float-to-int' instruction.
          vixl32::SRegister temp = LowSRegisterFrom(locations->GetTemp(0));
          __ Vcvt(I32, F32, temp, InputSRegisterAt(conversion, 0));
          __ Vmov(OutputRegister(conversion), temp);
          break;
        }

        case Primitive::kPrimDouble: {
          // Processing a Dex `double-to-int' instruction.
          vixl32::SRegister temp_s = LowSRegisterFrom(locations->GetTemp(0));
          __ Vcvt(I32, F64, temp_s, FromLowSToD(LowSRegisterFrom(in)));
          __ Vmov(OutputRegister(conversion), temp_s);
          break;
        }

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          DCHECK(out.IsRegisterPair());
          DCHECK(in.IsRegister());
          __ Mov(LowRegisterFrom(out), InputRegisterAt(conversion, 0));
          // Sign extension.
          __ Asr(HighRegisterFrom(out), LowRegisterFrom(out), 31);
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-long' instruction.
          codegen_->InvokeRuntime(kQuickF2l, conversion, conversion->GetDexPc());
          CheckEntrypointTypes<kQuickF2l, int64_t, float>();
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-long' instruction.
          codegen_->InvokeRuntime(kQuickD2l, conversion, conversion->GetDexPc());
          CheckEntrypointTypes<kQuickD2l, int64_t, double>();
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to char is a result of code transformations.
          __ Ubfx(OutputRegister(conversion), LowRegisterFrom(in), 0, 16);
          break;
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // Processing a Dex `int-to-char' instruction.
          __ Ubfx(OutputRegister(conversion), InputRegisterAt(conversion, 0), 0, 16);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar: {
          // Processing a Dex `int-to-float' instruction.
          __ Vmov(OutputSRegister(conversion), InputRegisterAt(conversion, 0));
          __ Vcvt(F32, I32, OutputSRegister(conversion), OutputSRegister(conversion));
          break;
        }

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-float' instruction.
          codegen_->InvokeRuntime(kQuickL2f, conversion, conversion->GetDexPc());
          CheckEntrypointTypes<kQuickL2f, float, int64_t>();
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-float' instruction.
          __ Vcvt(F32, F64, OutputSRegister(conversion), FromLowSToD(LowSRegisterFrom(in)));
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar: {
          // Processing a Dex `int-to-double' instruction.
          __ Vmov(LowSRegisterFrom(out), InputRegisterAt(conversion, 0));
          __ Vcvt(F64, I32, FromLowSToD(LowSRegisterFrom(out)), LowSRegisterFrom(out));
          break;
        }

        case Primitive::kPrimLong: {
          // Processing a Dex `long-to-double' instruction.
          vixl32::Register low = LowRegisterFrom(in);
          vixl32::Register high = HighRegisterFrom(in);

          vixl32::SRegister out_s = LowSRegisterFrom(out);
          vixl32::DRegister out_d = FromLowSToD(out_s);

          vixl32::SRegister temp_s = LowSRegisterFrom(locations->GetTemp(0));
          vixl32::DRegister temp_d = FromLowSToD(temp_s);

          vixl32::SRegister constant_s = LowSRegisterFrom(locations->GetTemp(1));
          vixl32::DRegister constant_d = FromLowSToD(constant_s);

          // temp_d = int-to-double(high)
          __ Vmov(temp_s, high);
          __ Vcvt(F64, I32, temp_d, temp_s);
          // constant_d = k2Pow32EncodingForDouble
          __ Vmov(constant_d, bit_cast<double, int64_t>(k2Pow32EncodingForDouble));
          // out_d = unsigned-to-double(low)
          __ Vmov(out_s, low);
          __ Vcvt(F64, U32, out_d, out_s);
          // out_d += temp_d * constant_d
          __ Vmla(F64, out_d, temp_d, constant_d);
          break;
        }

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-double' instruction.
          __ Vcvt(F64, F32, FromLowSToD(LowSRegisterFrom(out)), InputSRegisterAt(conversion, 0));
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
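
// For reference: the long-to-double path above evaluates
//   double(v) = double(int32(hi)) * 2^32 + double(uint32(lo)),
// with VMLA folding the multiply and the accumulate into one instruction.
// A C sketch of the same decomposition (illustrative only, not the emitted
// code):
//
//   double LongToDouble(int64_t v) {
//     int32_t hi = static_cast<int32_t>(v >> 32);
//     uint32_t lo = static_cast<uint32_t>(v);
//     return static_cast<double>(hi) * 4294967296.0  // 2^32
//         + static_cast<double>(lo);
//   }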

void LocationsBuilderARMVIXL::VisitAdd(HAdd* add) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }

    // TODO(VIXL): https://android-review.googlesource.com/#/c/254144/
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}

void InstructionCodeGeneratorARMVIXL::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      __ Add(OutputRegister(add), InputRegisterAt(add, 0), InputOperandAt(add, 1));
      break;
    }

    // TODO(VIXL): https://android-review.googlesource.com/#/c/254144/
    case Primitive::kPrimLong: {
      DCHECK(second.IsRegisterPair());
      __ Adds(LowRegisterFrom(out), LowRegisterFrom(first), LowRegisterFrom(second));
      __ Adc(HighRegisterFrom(out), HighRegisterFrom(first), HighRegisterFrom(second));
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Vadd(OutputVRegister(add), InputVRegisterAt(add, 0), InputVRegisterAt(add, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
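
// A minimal C sketch of the ADDS/ADC carry chain used for the long case above
// (illustrative only; the names are hypothetical):
//
//   uint32_t out_lo = a_lo + b_lo;              // ADDS: C = carry out of the add
//   uint32_t carry = (out_lo < a_lo) ? 1u : 0u;
//   uint32_t out_hi = a_hi + b_hi + carry;      // ADC consumes C
//
// VisitSub below mirrors this with SUBS/SBC, where C acts as a NOT-borrow.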

void LocationsBuilderARMVIXL::VisitSub(HSub* sub) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }

    // TODO(VIXL): https://android-review.googlesource.com/#/c/254144/
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}

void InstructionCodeGeneratorARMVIXL::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      __ Sub(OutputRegister(sub), InputRegisterAt(sub, 0), InputOperandAt(sub, 1));
      break;
    }

    // TODO(VIXL): https://android-review.googlesource.com/#/c/254144/
    case Primitive::kPrimLong: {
      DCHECK(second.IsRegisterPair());
      __ Subs(LowRegisterFrom(out), LowRegisterFrom(first), LowRegisterFrom(second));
      __ Sbc(HighRegisterFrom(out), HighRegisterFrom(first), HighRegisterFrom(second));
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Vsub(OutputVRegister(sub), InputVRegisterAt(sub, 0), InputVRegisterAt(sub, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}

void LocationsBuilderARMVIXL::VisitMul(HMul* mul) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void InstructionCodeGeneratorARMVIXL::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt: {
      __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
      break;
    }
    case Primitive::kPrimLong: {
      vixl32::Register out_hi = HighRegisterFrom(out);
      vixl32::Register out_lo = LowRegisterFrom(out);
      vixl32::Register in1_hi = HighRegisterFrom(first);
      vixl32::Register in1_lo = LowRegisterFrom(first);
      vixl32::Register in2_hi = HighRegisterFrom(second);
      vixl32::Register in2_lo = LowRegisterFrom(second);

      // Extra checks needed to guard against overlap with the R1_R2 pair:
      // the algorithm below is wrong if out.hi aliases either in1.lo or
      // in2.lo (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2).
      DCHECK_NE(out_hi.GetCode(), in1_lo.GetCode());
      DCHECK_NE(out_hi.GetCode(), in2_lo.GetCode());

      // input: in1 - 64 bits, in2 - 64 bits
      // output: out
      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo) * 2^32 + in1.lo * in2.lo
      // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
      // parts: out.lo = (in1.lo * in2.lo)[31:0]

      UseScratchRegisterScope temps(GetVIXLAssembler());
      vixl32::Register temp = temps.Acquire();
      // temp <- in1.lo * in2.hi
      __ Mul(temp, in1_lo, in2_hi);
      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
      __ Mla(out_hi, in1_hi, in2_lo, temp);
      // out.lo <- (in1.lo * in2.lo)[31:0];
      __ Umull(out_lo, temp, in1_lo, in2_lo);
      // out.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
      __ Add(out_hi, out_hi, temp);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Vmul(OutputVRegister(mul), InputVRegisterAt(mul, 0), InputVRegisterAt(mul, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
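
// A C sketch of the 64x64->64 multiply decomposition above (illustrative
// only; only the low 64 bits of the product are retained):
//
//   uint64_t Mul64(uint64_t a, uint64_t b) {
//     uint32_t a_lo = static_cast<uint32_t>(a);
//     uint32_t a_hi = static_cast<uint32_t>(a >> 32);
//     uint32_t b_lo = static_cast<uint32_t>(b);
//     uint32_t b_hi = static_cast<uint32_t>(b >> 32);
//     uint64_t lo_prod = static_cast<uint64_t>(a_lo) * b_lo;   // UMULL
//     uint32_t hi = a_lo * b_hi + a_hi * b_lo                  // MUL + MLA
//         + static_cast<uint32_t>(lo_prod >> 32);              // final ADD
//     return (static_cast<uint64_t>(hi) << 32) | static_cast<uint32_t>(lo_prod);
//   }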

void InstructionCodeGeneratorARMVIXL::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DCHECK(instruction->GetResultType() == Primitive::kPrimInt);

  Location second = instruction->GetLocations()->InAt(1);
  DCHECK(second.IsConstant());

  vixl32::Register out = OutputRegister(instruction);
  vixl32::Register dividend = InputRegisterAt(instruction, 0);
  int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
  DCHECK(imm == 1 || imm == -1);

  if (instruction->IsRem()) {
    __ Mov(out, 0);
  } else {
    if (imm == 1) {
      __ Mov(out, dividend);
    } else {
      __ Rsb(out, dividend, 0);
    }
  }
}

void InstructionCodeGeneratorARMVIXL::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DCHECK(instruction->GetResultType() == Primitive::kPrimInt);

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  vixl32::Register out = OutputRegister(instruction);
  vixl32::Register dividend = InputRegisterAt(instruction, 0);
  vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
  int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
  uint32_t abs_imm = static_cast<uint32_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

  if (ctz_imm == 1) {
    __ Lsr(temp, dividend, 32 - ctz_imm);
  } else {
    __ Asr(temp, dividend, 31);
    __ Lsr(temp, temp, 32 - ctz_imm);
  }
  __ Add(out, temp, dividend);

  if (instruction->IsDiv()) {
    __ Asr(out, out, ctz_imm);
    if (imm < 0) {
      __ Rsb(out, out, 0);
    }
  } else {
    __ Ubfx(out, out, 0, ctz_imm);
    __ Sub(out, out, temp);
  }
}
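
// A C sketch of the power-of-two division above for a divisor of magnitude
// 2^k (illustrative only). The bias makes the arithmetic shift round toward
// zero instead of toward negative infinity:
//
//   int32_t bias = static_cast<int32_t>(
//       static_cast<uint32_t>(x >> 31) >> (32 - k));      // 2^k - 1 if x < 0
//   int32_t div = (x + bias) >> k;                        // ASR; negated via
//                                                         // RSB if divisor < 0
//   int32_t rem = ((x + bias) & ((1 << k) - 1)) - bias;   // UBFX, then SUB
//
// e.g. x = -5, k = 2: bias = 3, div = -2 >> 2 = -1, rem = (-2 & 3) - 3 = -1.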

void InstructionCodeGeneratorARMVIXL::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DCHECK(instruction->GetResultType() == Primitive::kPrimInt);

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  vixl32::Register out = OutputRegister(instruction);
  vixl32::Register dividend = InputRegisterAt(instruction, 0);
  vixl32::Register temp1 = RegisterFrom(locations->GetTemp(0));
  vixl32::Register temp2 = RegisterFrom(locations->GetTemp(1));
  int64_t imm = second.GetConstant()->AsIntConstant()->GetValue();

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);

  __ Mov(temp1, magic);
  __ Smull(temp2, temp1, dividend, temp1);

  if (imm > 0 && magic < 0) {
    __ Add(temp1, temp1, dividend);
  } else if (imm < 0 && magic > 0) {
    __ Sub(temp1, temp1, dividend);
  }

  if (shift != 0) {
    __ Asr(temp1, temp1, shift);
  }

  if (instruction->IsDiv()) {
    __ Sub(out, temp1, Operand(temp1, vixl32::Shift(ASR), 31));
  } else {
    __ Sub(temp1, temp1, Operand(temp1, vixl32::Shift(ASR), 31));
    // TODO: Strength reduction for mls.
    __ Mov(temp2, imm);
    __ Mls(out, temp1, temp2, dividend);
  }
}
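
// Worked example of the magic-number path above (a sketch; the constants
// follow the standard Hacker's Delight derivation): for imm = 7, magic is
// 0x92492493 (negative) and shift is 2, so the division emits:
//
//   MOV   temp1, #0x92492493
//   SMULL temp2, temp1, dividend, temp1   ; temp1 = high word of the product
//   ADD   temp1, temp1, dividend          ; because imm > 0 && magic < 0
//   ASR   temp1, temp1, #2
//   SUB   out, temp1, temp1, ASR #31      ; round the quotient toward zero
//
// e.g. dividend = 21: high word = -9, then -9 + 21 = 12, 12 >> 2 = 3 = 21 / 7.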

void InstructionCodeGeneratorARMVIXL::GenerateDivRemConstantIntegral(
    HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DCHECK(instruction->GetResultType() == Primitive::kPrimInt);

  Location second = instruction->GetLocations()->InAt(1);
  DCHECK(second.IsConstant());

  int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
  if (imm == 0) {
    // Do not generate anything. DivZeroCheck would prevent any code from being executed.
  } else if (imm == 1 || imm == -1) {
    DivRemOneOrMinusOne(instruction);
  } else if (IsPowerOfTwo(AbsOrMin(imm))) {
    DivRemByPowerOfTwo(instruction);
  } else {
    DCHECK(imm <= -2 || imm >= 2);
    GenerateDivRemWithAnyConstant(instruction);
  }
}

void LocationsBuilderARMVIXL::VisitDiv(HDiv* div) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  if (div->GetResultType() == Primitive::kPrimLong) {
    // pLdiv runtime call.
    call_kind = LocationSummary::kCallOnMainOnly;
  } else if (div->GetResultType() == Primitive::kPrimInt && div->InputAt(1)->IsConstant()) {
    // sdiv will be replaced by another instruction sequence.
  } else if (div->GetResultType() == Primitive::kPrimInt &&
             !codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
    // pIdivmod runtime call.
    call_kind = LocationSummary::kCallOnMainOnly;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);

  switch (div->GetResultType()) {
    case Primitive::kPrimInt: {
      if (div->InputAt(1)->IsConstant()) {
        locations->SetInAt(0, Location::RequiresRegister());
        locations->SetInAt(1, Location::ConstantLocation(div->InputAt(1)->AsConstant()));
        locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
        int32_t value = div->InputAt(1)->AsIntConstant()->GetValue();
        if (value == 1 || value == 0 || value == -1) {
          // No temp register required.
        } else {
          locations->AddTemp(Location::RequiresRegister());
          if (!IsPowerOfTwo(AbsOrMin(value))) {
            locations->AddTemp(Location::RequiresRegister());
          }
        }
      } else if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
        locations->SetInAt(0, Location::RequiresRegister());
        locations->SetInAt(1, Location::RequiresRegister());
        locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      } else {
        TODO_VIXL32(FATAL);
      }
      break;
    }
    case Primitive::kPrimLong: {
      TODO_VIXL32(FATAL);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}

void InstructionCodeGeneratorARMVIXL::VisitDiv(HDiv* div) {
  Location rhs = div->GetLocations()->InAt(1);

  switch (div->GetResultType()) {
    case Primitive::kPrimInt: {
      if (rhs.IsConstant()) {
        GenerateDivRemConstantIntegral(div);
      } else if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
        __ Sdiv(OutputRegister(div), InputRegisterAt(div, 0), InputRegisterAt(div, 1));
      } else {
        TODO_VIXL32(FATAL);
      }
      break;
    }

    case Primitive::kPrimLong: {
      TODO_VIXL32(FATAL);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Vdiv(OutputVRegister(div), InputVRegisterAt(div, 0), InputVRegisterAt(div, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}

void LocationsBuilderARMVIXL::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  // TODO(VIXL): https://android-review.googlesource.com/#/c/275337/
  LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARMVIXL::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  DivZeroCheckSlowPathARMVIXL* slow_path =
      new (GetGraph()->GetArena()) DivZeroCheckSlowPathARMVIXL(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location value = locations->InAt(0);

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt: {
      if (value.IsRegister()) {
        __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
          __ B(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (value.IsRegisterPair()) {
        UseScratchRegisterScope temps(GetVIXLAssembler());
        vixl32::Register temp = temps.Acquire();
        __ Orrs(temp, LowRegisterFrom(value), HighRegisterFrom(value));
        __ B(eq, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
          __ B(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
  }
}

void InstructionCodeGeneratorARMVIXL::HandleIntegerRotate(HRor* ror) {
  LocationSummary* locations = ror->GetLocations();
  vixl32::Register in = InputRegisterAt(ror, 0);
  Location rhs = locations->InAt(1);
  vixl32::Register out = OutputRegister(ror);

  if (rhs.IsConstant()) {
    // Arm32 and Thumb2 assemblers require a rotation on the interval [1,31],
    // so map all rotations to a positive equivalent in that range
    // (e.g. a rotation left *or* right by -2 bits equals one by 30 bits in
    // the same direction).
    uint32_t rot = CodeGenerator::GetInt32ValueOf(rhs.GetConstant()) & 0x1F;
    if (rot) {
      // Rotate, mapping left rotations to right equivalents if necessary.
      // (e.g. left by 2 bits == right by 30.)
      __ Ror(out, in, rot);
    } else if (!out.Is(in)) {
      __ Mov(out, in);
    }
  } else {
    __ Ror(out, in, RegisterFrom(rhs));
  }
}
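
// The rotate-right identity relied on above, as a C sketch (illustrative
// only; the immediate form is already reduced to the interval [1,31]):
//
//   uint32_t Ror32(uint32_t x, unsigned rot) {
//     return (x >> rot) | (x << (32u - rot));
//   }
//
// A left rotation by n is encoded as a right rotation by 32 - n.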

// Gain some speed by mapping all Long rotates onto equivalent pairs of Integer
// rotates by swapping input regs (effectively rotating by the first 32-bits of
// a larger rotation) or flipping direction (thus treating larger right/left
// rotations as sub-word sized rotations in the other direction) as appropriate.
void InstructionCodeGeneratorARMVIXL::HandleLongRotate(HRor* ror) {
  LocationSummary* locations = ror->GetLocations();
  vixl32::Register in_reg_lo = LowRegisterFrom(locations->InAt(0));
  vixl32::Register in_reg_hi = HighRegisterFrom(locations->InAt(0));
  Location rhs = locations->InAt(1);
  vixl32::Register out_reg_lo = LowRegisterFrom(locations->Out());
  vixl32::Register out_reg_hi = HighRegisterFrom(locations->Out());

  if (rhs.IsConstant()) {
    uint64_t rot = CodeGenerator::GetInt64ValueOf(rhs.GetConstant());
| 2347 |     // Map all rotations to positive equivalents on the interval [0,63].
| 2348 | rot &= kMaxLongShiftDistance; |
| 2349 |     // For rotations of a word or more, 'pre-rotate' by 32 bits so that the logic
| 2350 |     // below reduces to a simple pair of shift-and-orr sequences.
| 2351 |     // (e.g. a 34-bit rotation == an in_reg swap + a 2-bit rotation right.)
| 2352 | if (rot >= kArmBitsPerWord) { |
| 2353 | rot -= kArmBitsPerWord; |
| 2354 | std::swap(in_reg_hi, in_reg_lo); |
| 2355 | } |
| 2356 | // Rotate, or mov to out for zero or word size rotations. |
| 2357 | if (rot != 0u) { |
| 2358 | __ Lsr(out_reg_hi, in_reg_hi, rot); |
| 2359 | __ Orr(out_reg_hi, out_reg_hi, Operand(in_reg_lo, ShiftType::LSL, kArmBitsPerWord - rot)); |
| 2360 | __ Lsr(out_reg_lo, in_reg_lo, rot); |
| 2361 | __ Orr(out_reg_lo, out_reg_lo, Operand(in_reg_hi, ShiftType::LSL, kArmBitsPerWord - rot)); |
| 2362 | } else { |
| 2363 | __ Mov(out_reg_lo, in_reg_lo); |
| 2364 | __ Mov(out_reg_hi, in_reg_hi); |
| 2365 | } |
| 2366 | } else { |
| 2367 | vixl32::Register shift_right = RegisterFrom(locations->GetTemp(0)); |
| 2368 | vixl32::Register shift_left = RegisterFrom(locations->GetTemp(1)); |
| 2369 | vixl32::Label end; |
| 2370 | vixl32::Label shift_by_32_plus_shift_right; |
| 2371 | |
| 2372 | __ And(shift_right, RegisterFrom(rhs), 0x1F); |
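|      |     // The result of the Lsrs below is discarded; the instruction is emitted only
|      |     // for its carry-out, which receives bit 5 of the rotation amount (i.e. whether
|      |     // the full rotation is >= 32 bits).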
| 2373 | __ Lsrs(shift_left, RegisterFrom(rhs), 6); |
| 2374 |     // TODO(VIXL): Check that the flags are kept after "vixl32::LeaveFlags" is enabled.
| 2375 | __ Rsb(shift_left, shift_right, kArmBitsPerWord); |
| 2376 | __ B(cc, &shift_by_32_plus_shift_right); |
| 2377 | |
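|      |     // Note: the two shift amounts always sum to 32, so the operands of each Add
|      |     // below never have overlapping set bits; Add therefore computes the same
|      |     // result as Orr here.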
| 2378 | // out_reg_hi = (reg_hi << shift_left) | (reg_lo >> shift_right). |
| 2379 | // out_reg_lo = (reg_lo << shift_left) | (reg_hi >> shift_right). |
| 2380 | __ Lsl(out_reg_hi, in_reg_hi, shift_left); |
| 2381 | __ Lsr(out_reg_lo, in_reg_lo, shift_right); |
| 2382 | __ Add(out_reg_hi, out_reg_hi, out_reg_lo); |
| 2383 | __ Lsl(out_reg_lo, in_reg_lo, shift_left); |
| 2384 | __ Lsr(shift_left, in_reg_hi, shift_right); |
| 2385 | __ Add(out_reg_lo, out_reg_lo, shift_left); |
| 2386 | __ B(&end); |
| 2387 | |
| 2388 | __ Bind(&shift_by_32_plus_shift_right); // Shift by 32+shift_right. |
| 2389 | // out_reg_hi = (reg_hi >> shift_right) | (reg_lo << shift_left). |
| 2390 | // out_reg_lo = (reg_lo >> shift_right) | (reg_hi << shift_left). |
| 2391 | __ Lsr(out_reg_hi, in_reg_hi, shift_right); |
| 2392 | __ Lsl(out_reg_lo, in_reg_lo, shift_left); |
| 2393 | __ Add(out_reg_hi, out_reg_hi, out_reg_lo); |
| 2394 | __ Lsr(out_reg_lo, in_reg_lo, shift_right); |
| 2395 | __ Lsl(shift_right, in_reg_hi, shift_left); |
| 2396 | __ Add(out_reg_lo, out_reg_lo, shift_right); |
| 2397 | |
| 2398 | __ Bind(&end); |
| 2399 | } |
| 2400 | } |
| 2401 | |
| 2402 | void LocationsBuilderARMVIXL::VisitRor(HRor* ror) { |
| 2403 | LocationSummary* locations = |
| 2404 | new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall); |
| 2405 | switch (ror->GetResultType()) { |
| 2406 | case Primitive::kPrimInt: { |
| 2407 | locations->SetInAt(0, Location::RequiresRegister()); |
| 2408 | locations->SetInAt(1, Location::RegisterOrConstant(ror->InputAt(1))); |
| 2409 | locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); |
| 2410 | break; |
| 2411 | } |
| 2412 | case Primitive::kPrimLong: { |
| 2413 | locations->SetInAt(0, Location::RequiresRegister()); |
| 2414 | if (ror->InputAt(1)->IsConstant()) { |
| 2415 | locations->SetInAt(1, Location::ConstantLocation(ror->InputAt(1)->AsConstant())); |
| 2416 | } else { |
| 2417 | locations->SetInAt(1, Location::RequiresRegister()); |
| 2418 | locations->AddTemp(Location::RequiresRegister()); |
| 2419 | locations->AddTemp(Location::RequiresRegister()); |
| 2420 | } |
| 2421 | locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap); |
| 2422 | break; |
| 2423 | } |
| 2424 | default: |
| 2425 | LOG(FATAL) << "Unexpected operation type " << ror->GetResultType(); |
| 2426 | } |
| 2427 | } |
| 2428 | |
| 2429 | void InstructionCodeGeneratorARMVIXL::VisitRor(HRor* ror) { |
| 2430 | Primitive::Type type = ror->GetResultType(); |
| 2431 | switch (type) { |
| 2432 | case Primitive::kPrimInt: { |
| 2433 | HandleIntegerRotate(ror); |
| 2434 | break; |
| 2435 | } |
| 2436 | case Primitive::kPrimLong: { |
| 2437 | HandleLongRotate(ror); |
| 2438 | break; |
| 2439 | } |
| 2440 | default: |
| 2441 | LOG(FATAL) << "Unexpected operation type " << type; |
| 2442 | UNREACHABLE(); |
| 2443 | } |
| 2444 | } |
| 2445 | |
Artem Serov | 02d3783 | 2016-10-25 15:25:33 +0100 | [diff] [blame^] | 2446 | void LocationsBuilderARMVIXL::HandleShift(HBinaryOperation* op) { |
| 2447 | DCHECK(op->IsShl() || op->IsShr() || op->IsUShr()); |
| 2448 | |
| 2449 | LocationSummary* locations = |
| 2450 | new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall); |
| 2451 | |
| 2452 | switch (op->GetResultType()) { |
| 2453 | case Primitive::kPrimInt: { |
| 2454 | locations->SetInAt(0, Location::RequiresRegister()); |
| 2455 | if (op->InputAt(1)->IsConstant()) { |
| 2456 | locations->SetInAt(1, Location::ConstantLocation(op->InputAt(1)->AsConstant())); |
| 2457 | locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); |
| 2458 | } else { |
| 2459 | locations->SetInAt(1, Location::RequiresRegister()); |
| 2460 | // Make the output overlap, as it will be used to hold the masked |
| 2461 | // second input. |
| 2462 | locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap); |
| 2463 | } |
| 2464 | break; |
| 2465 | } |
| 2466 | case Primitive::kPrimLong: { |
| 2467 | locations->SetInAt(0, Location::RequiresRegister()); |
| 2468 | if (op->InputAt(1)->IsConstant()) { |
| 2469 | locations->SetInAt(1, Location::ConstantLocation(op->InputAt(1)->AsConstant())); |
| 2470 |         // For simplicity, use kOutputOverlap even though we only require that low registers
| 2471 |         // don't clash with high registers, which the register allocator currently guarantees.
| 2472 | locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap); |
| 2473 | } else { |
| 2474 | locations->SetInAt(1, Location::RequiresRegister()); |
| 2475 | locations->AddTemp(Location::RequiresRegister()); |
| 2476 | locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap); |
| 2477 | } |
| 2478 | break; |
| 2479 | } |
| 2480 | default: |
| 2481 | LOG(FATAL) << "Unexpected operation type " << op->GetResultType(); |
| 2482 | } |
| 2483 | } |
| 2484 | |
| 2485 | void InstructionCodeGeneratorARMVIXL::HandleShift(HBinaryOperation* op) { |
| 2486 | DCHECK(op->IsShl() || op->IsShr() || op->IsUShr()); |
| 2487 | |
| 2488 | LocationSummary* locations = op->GetLocations(); |
| 2489 | Location out = locations->Out(); |
| 2490 | Location first = locations->InAt(0); |
| 2491 | Location second = locations->InAt(1); |
| 2492 | |
| 2493 | Primitive::Type type = op->GetResultType(); |
| 2494 | switch (type) { |
| 2495 | case Primitive::kPrimInt: { |
| 2496 | vixl32::Register out_reg = OutputRegister(op); |
| 2497 | vixl32::Register first_reg = InputRegisterAt(op, 0); |
| 2498 | if (second.IsRegister()) { |
| 2499 | vixl32::Register second_reg = RegisterFrom(second); |
| 2500 |         // ARM doesn't mask the shift count, so we need to do it ourselves.
| 2501 | __ And(out_reg, second_reg, kMaxIntShiftDistance); |
| 2502 | if (op->IsShl()) { |
| 2503 | __ Lsl(out_reg, first_reg, out_reg); |
| 2504 | } else if (op->IsShr()) { |
| 2505 | __ Asr(out_reg, first_reg, out_reg); |
| 2506 | } else { |
| 2507 | __ Lsr(out_reg, first_reg, out_reg); |
| 2508 | } |
| 2509 | } else { |
| 2510 | int32_t cst = second.GetConstant()->AsIntConstant()->GetValue(); |
| 2511 | uint32_t shift_value = cst & kMaxIntShiftDistance; |
| 2512 | if (shift_value == 0) { // ARM does not support shifting with 0 immediate. |
| 2513 | __ Mov(out_reg, first_reg); |
| 2514 | } else if (op->IsShl()) { |
| 2515 | __ Lsl(out_reg, first_reg, shift_value); |
| 2516 | } else if (op->IsShr()) { |
| 2517 | __ Asr(out_reg, first_reg, shift_value); |
| 2518 | } else { |
| 2519 | __ Lsr(out_reg, first_reg, shift_value); |
| 2520 | } |
| 2521 | } |
| 2522 | break; |
| 2523 | } |
| 2524 | case Primitive::kPrimLong: { |
| 2525 | vixl32::Register o_h = HighRegisterFrom(out); |
| 2526 | vixl32::Register o_l = LowRegisterFrom(out); |
| 2527 | |
| 2528 | vixl32::Register high = HighRegisterFrom(first); |
| 2529 | vixl32::Register low = LowRegisterFrom(first); |
| 2530 | |
| 2531 | if (second.IsRegister()) { |
| 2532 | vixl32::Register temp = RegisterFrom(locations->GetTemp(0)); |
| 2533 | |
| 2534 | vixl32::Register second_reg = RegisterFrom(second); |
| 2535 | |
| 2536 | if (op->IsShl()) { |
| 2537 | __ And(o_l, second_reg, kMaxLongShiftDistance); |
| 2538 | // Shift the high part |
| 2539 | __ Lsl(o_h, high, o_l); |
| 2540 |           // Shift the low part and `orr` the overflowing bits into the high part.
| 2541 | __ Rsb(temp, o_l, kArmBitsPerWord); |
| 2542 | __ Lsr(temp, low, temp); |
| 2543 | __ Orr(o_h, o_h, temp); |
| 2544 |           // If the shift is >= 32 bits, override the high part.
| 2545 | __ Subs(temp, o_l, kArmBitsPerWord); |
| 2546 | { |
| 2547 | AssemblerAccurateScope guard(GetVIXLAssembler(), |
| 2548 | 3 * kArmInstrMaxSizeInBytes, |
| 2549 | CodeBufferCheckScope::kMaximumSize); |
| 2550 | __ it(pl); |
| 2551 | __ lsl(pl, o_h, low, temp); |
| 2552 | } |
| 2553 | // Shift the low part |
| 2554 | __ Lsl(o_l, low, o_l); |
| 2555 | } else if (op->IsShr()) { |
| 2556 | __ And(o_h, second_reg, kMaxLongShiftDistance); |
| 2557 | // Shift the low part |
| 2558 | __ Lsr(o_l, low, o_h); |
| 2559 |           // Shift the high part and `orr` the underflowing bits into the low part.
| 2560 | __ Rsb(temp, o_h, kArmBitsPerWord); |
| 2561 | __ Lsl(temp, high, temp); |
| 2562 | __ Orr(o_l, o_l, temp); |
| 2563 |           // If the shift is >= 32 bits, override the low part.
| 2564 | __ Subs(temp, o_h, kArmBitsPerWord); |
| 2565 | { |
| 2566 | AssemblerAccurateScope guard(GetVIXLAssembler(), |
| 2567 | 3 * kArmInstrMaxSizeInBytes, |
| 2568 | CodeBufferCheckScope::kMaximumSize); |
| 2569 | __ it(pl); |
| 2570 | __ asr(pl, o_l, high, temp); |
| 2571 | } |
| 2572 | // Shift the high part |
| 2573 | __ Asr(o_h, high, o_h); |
| 2574 | } else { |
| 2575 | __ And(o_h, second_reg, kMaxLongShiftDistance); |
| 2576 |           // Same as Shr, except we use `Lsr`s instead of `Asr`s.
| 2577 | __ Lsr(o_l, low, o_h); |
| 2578 | __ Rsb(temp, o_h, kArmBitsPerWord); |
| 2579 | __ Lsl(temp, high, temp); |
| 2580 | __ Orr(o_l, o_l, temp); |
| 2581 | __ Subs(temp, o_h, kArmBitsPerWord); |
| 2582 | { |
| 2583 | AssemblerAccurateScope guard(GetVIXLAssembler(), |
| 2584 | 3 * kArmInstrMaxSizeInBytes, |
| 2585 | CodeBufferCheckScope::kMaximumSize); |
| 2586 | __ it(pl); |
| 2587 | __ lsr(pl, o_l, high, temp); |
| 2588 | } |
| 2589 | __ Lsr(o_h, high, o_h); |
| 2590 | } |
| 2591 | } else { |
| 2592 | // Register allocator doesn't create partial overlap. |
| 2593 | DCHECK(!o_l.Is(high)); |
| 2594 | DCHECK(!o_h.Is(low)); |
| 2595 | int32_t cst = second.GetConstant()->AsIntConstant()->GetValue(); |
| 2596 | uint32_t shift_value = cst & kMaxLongShiftDistance; |
| 2597 | if (shift_value > 32) { |
| 2598 | if (op->IsShl()) { |
| 2599 | __ Lsl(o_h, low, shift_value - 32); |
| 2600 | __ Mov(o_l, 0); |
| 2601 | } else if (op->IsShr()) { |
| 2602 | __ Asr(o_l, high, shift_value - 32); |
| 2603 | __ Asr(o_h, high, 31); |
| 2604 | } else { |
| 2605 | __ Lsr(o_l, high, shift_value - 32); |
| 2606 | __ Mov(o_h, 0); |
| 2607 | } |
| 2608 | } else if (shift_value == 32) { |
| 2609 | if (op->IsShl()) { |
| 2610 | __ Mov(o_h, low); |
| 2611 | __ Mov(o_l, 0); |
| 2612 | } else if (op->IsShr()) { |
| 2613 | __ Mov(o_l, high); |
| 2614 | __ Asr(o_h, high, 31); |
| 2615 | } else { |
| 2616 | __ Mov(o_l, high); |
| 2617 | __ Mov(o_h, 0); |
| 2618 | } |
| 2619 | } else if (shift_value == 1) { |
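|      |           // A 1-bit shift goes through the carry flag: the flag-setting shift below
|      |           // leaves the bit that crosses the word boundary in the carry, and Adc/Rrx
|      |           // then inserts it into the other half of the result.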
| 2620 | if (op->IsShl()) { |
| 2621 | __ Lsls(o_l, low, 1); |
| 2622 | __ Adc(o_h, high, high); |
| 2623 | } else if (op->IsShr()) { |
| 2624 | __ Asrs(o_h, high, 1); |
| 2625 | __ Rrx(o_l, low); |
| 2626 | } else { |
| 2627 | __ Lsrs(o_h, high, 1); |
| 2628 | __ Rrx(o_l, low); |
| 2629 | } |
| 2630 | } else { |
| 2631 | DCHECK(2 <= shift_value && shift_value < 32) << shift_value; |
| 2632 | if (op->IsShl()) { |
| 2633 | __ Lsl(o_h, high, shift_value); |
| 2634 | __ Orr(o_h, o_h, Operand(low, ShiftType::LSR, 32 - shift_value)); |
| 2635 | __ Lsl(o_l, low, shift_value); |
| 2636 | } else if (op->IsShr()) { |
| 2637 | __ Lsr(o_l, low, shift_value); |
| 2638 | __ Orr(o_l, o_l, Operand(high, ShiftType::LSL, 32 - shift_value)); |
| 2639 | __ Asr(o_h, high, shift_value); |
| 2640 | } else { |
| 2641 | __ Lsr(o_l, low, shift_value); |
| 2642 | __ Orr(o_l, o_l, Operand(high, ShiftType::LSL, 32 - shift_value)); |
| 2643 | __ Lsr(o_h, high, shift_value); |
| 2644 | } |
| 2645 | } |
| 2646 | } |
| 2647 | break; |
| 2648 | } |
| 2649 | default: |
| 2650 | LOG(FATAL) << "Unexpected operation type " << type; |
| 2651 | UNREACHABLE(); |
| 2652 | } |
| 2653 | } |
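|      | 
|      | // A minimal sketch of the constant-distance long shift handled above (a plain C++
|      | // illustration under the hi:lo register-pair model, not generated code):
|      | //   // 64-bit logical shift left of hi:lo by a constant n in [0,63].
|      | //   if (n >= 32)     { hi = lo << (n - 32); lo = 0; }
|      | //   else if (n != 0) { hi = (hi << n) | (lo >> (32 - n)); lo = lo << n; }
|      | // Shr and UShr mirror this with Asr/Lsr and the roles of hi and lo exchanged.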
| 2654 | |
| 2655 | void LocationsBuilderARMVIXL::VisitShl(HShl* shl) { |
| 2656 | HandleShift(shl); |
| 2657 | } |
| 2658 | |
| 2659 | void InstructionCodeGeneratorARMVIXL::VisitShl(HShl* shl) { |
| 2660 | HandleShift(shl); |
| 2661 | } |
| 2662 | |
| 2663 | void LocationsBuilderARMVIXL::VisitShr(HShr* shr) { |
| 2664 | HandleShift(shr); |
| 2665 | } |
| 2666 | |
| 2667 | void InstructionCodeGeneratorARMVIXL::VisitShr(HShr* shr) { |
| 2668 | HandleShift(shr); |
| 2669 | } |
| 2670 | |
| 2671 | void LocationsBuilderARMVIXL::VisitUShr(HUShr* ushr) { |
| 2672 | HandleShift(ushr); |
| 2673 | } |
| 2674 | |
| 2675 | void InstructionCodeGeneratorARMVIXL::VisitUShr(HUShr* ushr) { |
| 2676 | HandleShift(ushr); |
| 2677 | } |
| 2678 | |
| 2679 | void LocationsBuilderARMVIXL::VisitNewInstance(HNewInstance* instruction) { |
| 2680 | LocationSummary* locations = |
| 2681 | new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly); |
| 2682 | if (instruction->IsStringAlloc()) { |
| 2683 | locations->AddTemp(LocationFrom(kMethodRegister)); |
| 2684 | } else { |
| 2685 | InvokeRuntimeCallingConventionARMVIXL calling_convention; |
| 2686 | locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0))); |
| 2687 | locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1))); |
| 2688 | } |
| 2689 | locations->SetOut(LocationFrom(r0)); |
| 2690 | } |
| 2691 | |
| 2692 | void InstructionCodeGeneratorARMVIXL::VisitNewInstance(HNewInstance* instruction) { |
| 2693 |   // Note: if heap poisoning is enabled, the entry point takes care
| 2694 |   // of poisoning the reference.
| 2695 | if (instruction->IsStringAlloc()) { |
| 2696 | // String is allocated through StringFactory. Call NewEmptyString entry point. |
| 2697 | vixl32::Register temp = RegisterFrom(instruction->GetLocations()->GetTemp(0)); |
| 2698 | MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArmPointerSize); |
| 2699 | GetAssembler()->LoadFromOffset(kLoadWord, temp, tr, QUICK_ENTRY_POINT(pNewEmptyString)); |
| 2700 | GetAssembler()->LoadFromOffset(kLoadWord, lr, temp, code_offset.Int32Value()); |
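|      |     // The scope below pins the size of the call sequence, so the pc recorded by
|      |     // RecordPcInfo is the address immediately after the blx (no literal pool or
|      |     // veneer can be emitted in between).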
| 2701 | AssemblerAccurateScope aas(GetVIXLAssembler(), |
| 2702 | kArmInstrMaxSizeInBytes, |
| 2703 | CodeBufferCheckScope::kMaximumSize); |
| 2704 | __ blx(lr); |
| 2705 | codegen_->RecordPcInfo(instruction, instruction->GetDexPc()); |
| 2706 | } else { |
| 2707 | codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc()); |
| 2708 | CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>(); |
| 2709 | } |
| 2710 | } |
| 2711 | |
| 2712 | void LocationsBuilderARMVIXL::VisitNewArray(HNewArray* instruction) { |
| 2713 | LocationSummary* locations = |
| 2714 | new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly); |
| 2715 | InvokeRuntimeCallingConventionARMVIXL calling_convention; |
| 2716 | locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0))); |
| 2717 | locations->SetOut(LocationFrom(r0)); |
| 2718 | locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1))); |
| 2719 | locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(2))); |
| 2720 | } |
| 2721 | |
| 2722 | void InstructionCodeGeneratorARMVIXL::VisitNewArray(HNewArray* instruction) { |
| 2723 | InvokeRuntimeCallingConventionARMVIXL calling_convention; |
| 2724 | __ Mov(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex()); |
| 2725 |   // Note: if heap poisoning is enabled, the entry point takes care
| 2726 |   // of poisoning the reference.
| 2727 | codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc()); |
| 2728 | CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>(); |
| 2729 | } |
| 2730 | |
| 2731 | void LocationsBuilderARMVIXL::VisitParameterValue(HParameterValue* instruction) { |
| 2732 | LocationSummary* locations = |
| 2733 | new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); |
| 2734 | Location location = parameter_visitor_.GetNextLocation(instruction->GetType()); |
| 2735 | if (location.IsStackSlot()) { |
| 2736 | location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize()); |
| 2737 | } else if (location.IsDoubleStackSlot()) { |
| 2738 | location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize()); |
| 2739 | } |
| 2740 | locations->SetOut(location); |
| 2741 | } |
| 2742 | |
| 2743 | void InstructionCodeGeneratorARMVIXL::VisitParameterValue( |
| 2744 | HParameterValue* instruction ATTRIBUTE_UNUSED) { |
| 2745 | // Nothing to do, the parameter is already at its location. |
| 2746 | } |
| 2747 | |
| 2748 | void LocationsBuilderARMVIXL::VisitCurrentMethod(HCurrentMethod* instruction) { |
| 2749 | LocationSummary* locations = |
| 2750 | new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); |
| 2751 | locations->SetOut(LocationFrom(kMethodRegister)); |
| 2752 | } |
| 2753 | |
| 2754 | void InstructionCodeGeneratorARMVIXL::VisitCurrentMethod( |
| 2755 | HCurrentMethod* instruction ATTRIBUTE_UNUSED) { |
| 2756 | // Nothing to do, the method is already at its location. |
| 2757 | } |
| 2758 | |
| 2759 | void LocationsBuilderARMVIXL::VisitNot(HNot* not_) { |
| 2760 | LocationSummary* locations = |
| 2761 | new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall); |
| 2762 | locations->SetInAt(0, Location::RequiresRegister()); |
| 2763 | locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); |
| 2764 | } |
| 2765 | |
| 2766 | void InstructionCodeGeneratorARMVIXL::VisitNot(HNot* not_) { |
| 2767 | LocationSummary* locations = not_->GetLocations(); |
| 2768 | Location out = locations->Out(); |
| 2769 | Location in = locations->InAt(0); |
| 2770 | switch (not_->GetResultType()) { |
| 2771 | case Primitive::kPrimInt: |
| 2772 | __ Mvn(OutputRegister(not_), InputRegisterAt(not_, 0)); |
| 2773 | break; |
| 2774 | |
| 2775 | case Primitive::kPrimLong: |
| 2776 | __ Mvn(LowRegisterFrom(out), LowRegisterFrom(in)); |
| 2777 | __ Mvn(HighRegisterFrom(out), HighRegisterFrom(in)); |
| 2778 | break; |
| 2779 | |
| 2780 | default: |
| 2781 | LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType(); |
| 2782 | } |
| 2783 | } |
| 2784 | |
| 2785 | void LocationsBuilderARMVIXL::VisitCompare(HCompare* compare) { |
| 2786 | LocationSummary* locations = |
| 2787 | new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall); |
| 2788 | switch (compare->InputAt(0)->GetType()) { |
| 2789 | case Primitive::kPrimBoolean: |
| 2790 | case Primitive::kPrimByte: |
| 2791 | case Primitive::kPrimShort: |
| 2792 | case Primitive::kPrimChar: |
| 2793 | case Primitive::kPrimInt: |
| 2794 | case Primitive::kPrimLong: { |
| 2795 | locations->SetInAt(0, Location::RequiresRegister()); |
| 2796 | locations->SetInAt(1, Location::RequiresRegister()); |
| 2797 | // Output overlaps because it is written before doing the low comparison. |
| 2798 | locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap); |
| 2799 | break; |
| 2800 | } |
| 2801 | case Primitive::kPrimFloat: |
| 2802 | case Primitive::kPrimDouble: { |
| 2803 | locations->SetInAt(0, Location::RequiresFpuRegister()); |
| 2804 | locations->SetInAt(1, ArithmeticZeroOrFpuRegister(compare->InputAt(1))); |
| 2805 | locations->SetOut(Location::RequiresRegister()); |
| 2806 | break; |
| 2807 | } |
| 2808 | default: |
| 2809 | LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType(); |
| 2810 | } |
| 2811 | } |
| 2812 | |
| 2813 | void InstructionCodeGeneratorARMVIXL::VisitCompare(HCompare* compare) { |
| 2814 | LocationSummary* locations = compare->GetLocations(); |
| 2815 | vixl32::Register out = OutputRegister(compare); |
| 2816 | Location left = locations->InAt(0); |
| 2817 | Location right = locations->InAt(1); |
| 2818 | |
| 2819 | vixl32::Label less, greater, done; |
| 2820 | Primitive::Type type = compare->InputAt(0)->GetType(); |
| 2821 | vixl32::Condition less_cond = vixl32::Condition(kNone); |
| 2822 | switch (type) { |
| 2823 | case Primitive::kPrimBoolean: |
| 2824 | case Primitive::kPrimByte: |
| 2825 | case Primitive::kPrimShort: |
| 2826 | case Primitive::kPrimChar: |
| 2827 | case Primitive::kPrimInt: { |
| 2828 | // Emit move to `out` before the `Cmp`, as `Mov` might affect the status flags. |
| 2829 | __ Mov(out, 0); |
| 2830 | __ Cmp(RegisterFrom(left), RegisterFrom(right)); // Signed compare. |
| 2831 | less_cond = lt; |
| 2832 | break; |
| 2833 | } |
| 2834 | case Primitive::kPrimLong: { |
| 2835 | __ Cmp(HighRegisterFrom(left), HighRegisterFrom(right)); // Signed compare. |
| 2836 | __ B(lt, &less); |
| 2837 | __ B(gt, &greater); |
| 2838 | // Emit move to `out` before the last `Cmp`, as `Mov` might affect the status flags. |
| 2839 | __ Mov(out, 0); |
| 2840 | __ Cmp(LowRegisterFrom(left), LowRegisterFrom(right)); // Unsigned compare. |
| 2841 | less_cond = lo; |
| 2842 | break; |
| 2843 | } |
| 2844 | case Primitive::kPrimFloat: |
| 2845 | case Primitive::kPrimDouble: { |
| 2846 | __ Mov(out, 0); |
| 2847 | GenerateVcmp(compare); |
| 2848 | // To branch on the FP compare result we transfer FPSCR to APSR (encoded as PC in VMRS). |
| 2849 | __ Vmrs(RegisterOrAPSR_nzcv(kPcCode), FPSCR); |
| 2850 | less_cond = ARMFPCondition(kCondLT, compare->IsGtBias()); |
| 2851 | break; |
| 2852 | } |
| 2853 | default: |
| 2854 | LOG(FATAL) << "Unexpected compare type " << type; |
| 2855 | UNREACHABLE(); |
| 2856 | } |
| 2857 | |
| 2858 | __ B(eq, &done); |
| 2859 | __ B(less_cond, &less); |
| 2860 | |
| 2861 | __ Bind(&greater); |
| 2862 | __ Mov(out, 1); |
| 2863 | __ B(&done); |
| 2864 | |
| 2865 | __ Bind(&less); |
| 2866 | __ Mov(out, -1); |
| 2867 | |
| 2868 | __ Bind(&done); |
| 2869 | } |
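|      | 
|      | // An illustration of the long case above (a sketch using the Low32Bits/High32Bits
|      | // helpers; not generated code):
|      | //   int32_t Compare64(int64_t a, int64_t b) {
|      | //     int32_t a_hi = static_cast<int32_t>(High32Bits(a));
|      | //     int32_t b_hi = static_cast<int32_t>(High32Bits(b));
|      | //     if (a_hi != b_hi) return (a_hi < b_hi) ? -1 : 1;  // Signed on the high words.
|      | //     uint32_t a_lo = Low32Bits(a);
|      | //     uint32_t b_lo = Low32Bits(b);
|      | //     if (a_lo != b_lo) return (a_lo < b_lo) ? -1 : 1;  // Unsigned on the low words.
|      | //     return 0;
|      | //   }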
| 2870 | |
| 2871 | void LocationsBuilderARMVIXL::VisitPhi(HPhi* instruction) { |
| 2872 | LocationSummary* locations = |
| 2873 | new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); |
| 2874 | for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) { |
| 2875 | locations->SetInAt(i, Location::Any()); |
| 2876 | } |
| 2877 | locations->SetOut(Location::Any()); |
| 2878 | } |
| 2879 | |
| 2880 | void InstructionCodeGeneratorARMVIXL::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) { |
| 2881 | LOG(FATAL) << "Unreachable"; |
| 2882 | } |
| 2883 | |
| 2884 | void CodeGeneratorARMVIXL::GenerateMemoryBarrier(MemBarrierKind kind) { |
| 2885 | // TODO (ported from quick): revisit ARM barrier kinds. |
| 2886 | DmbOptions flavor = DmbOptions::ISH; // Quiet C++ warnings. |
| 2887 | switch (kind) { |
| 2888 | case MemBarrierKind::kAnyStore: |
| 2889 | case MemBarrierKind::kLoadAny: |
| 2890 | case MemBarrierKind::kAnyAny: { |
| 2891 | flavor = DmbOptions::ISH; |
| 2892 | break; |
| 2893 | } |
| 2894 | case MemBarrierKind::kStoreStore: { |
| 2895 | flavor = DmbOptions::ISHST; |
| 2896 | break; |
| 2897 | } |
| 2898 | default: |
| 2899 | LOG(FATAL) << "Unexpected memory barrier " << kind; |
| 2900 | } |
| 2901 | __ Dmb(flavor); |
| 2902 | } |
| 2903 | |
| 2904 | void InstructionCodeGeneratorARMVIXL::GenerateWideAtomicLoad(vixl32::Register addr, |
| 2905 | uint32_t offset, |
| 2906 | vixl32::Register out_lo, |
| 2907 | vixl32::Register out_hi) { |
| 2908 | UseScratchRegisterScope temps(GetVIXLAssembler()); |
| 2909 | if (offset != 0) { |
| 2910 | vixl32::Register temp = temps.Acquire(); |
| 2911 | __ Add(temp, addr, offset); |
| 2912 | addr = temp; |
| 2913 | } |
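|      |   // A single Ldrexd yields a single-copy-atomic 64-bit load even on cores where a
|      |   // plain Ldrd is not guaranteed to be atomic; the exclusive monitor state it
|      |   // opens is simply left unused.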
| 2914 | __ Ldrexd(out_lo, out_hi, addr); |
| 2915 | } |
| 2916 | |
| 2917 | void InstructionCodeGeneratorARMVIXL::GenerateWideAtomicStore(vixl32::Register addr, |
| 2918 | uint32_t offset, |
| 2919 | vixl32::Register value_lo, |
| 2920 | vixl32::Register value_hi, |
| 2921 | vixl32::Register temp1, |
| 2922 | vixl32::Register temp2, |
| 2923 | HInstruction* instruction) { |
| 2924 | UseScratchRegisterScope temps(GetVIXLAssembler()); |
| 2925 | vixl32::Label fail; |
| 2926 | if (offset != 0) { |
| 2927 | vixl32::Register temp = temps.Acquire(); |
| 2928 | __ Add(temp, addr, offset); |
| 2929 | addr = temp; |
| 2930 | } |
| 2931 | __ Bind(&fail); |
| 2932 |   // We need a load followed by a store. (The address used in a STREX instruction must
| 2933 |   // be the same as the address in the most recently executed LDREX instruction.)
| 2934 | __ Ldrexd(temp1, temp2, addr); |
| 2935 | codegen_->MaybeRecordImplicitNullCheck(instruction); |
| 2936 | __ Strexd(temp1, value_lo, value_hi, addr); |
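|      |   // Strexd writes 0 to temp1 on success, or 1 if the exclusive monitor was lost
|      |   // between the Ldrexd and the Strexd; in that case loop and try again.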
| 2937 | __ Cbnz(temp1, &fail); |
| 2938 | } |
Artem Serov | 02109dd | 2016-09-23 17:17:54 +0100 | [diff] [blame] | 2939 | |
Scott Wakeling | a7812ae | 2016-10-17 10:03:36 +0100 | [diff] [blame] | 2940 | void LocationsBuilderARMVIXL::HandleFieldSet( |
| 2941 | HInstruction* instruction, const FieldInfo& field_info) { |
| 2942 | DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet()); |
| 2943 | |
| 2944 | LocationSummary* locations = |
| 2945 | new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); |
| 2946 | locations->SetInAt(0, Location::RequiresRegister()); |
| 2947 | |
| 2948 | Primitive::Type field_type = field_info.GetFieldType(); |
| 2949 | if (Primitive::IsFloatingPointType(field_type)) { |
| 2950 | locations->SetInAt(1, Location::RequiresFpuRegister()); |
| 2951 | } else { |
| 2952 | locations->SetInAt(1, Location::RequiresRegister()); |
| 2953 | } |
| 2954 | |
| 2955 | bool is_wide = field_type == Primitive::kPrimLong || field_type == Primitive::kPrimDouble; |
| 2956 | bool generate_volatile = field_info.IsVolatile() |
| 2957 | && is_wide |
| 2958 | && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd(); |
| 2959 | bool needs_write_barrier = |
| 2960 | CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1)); |
| 2961 | // Temporary registers for the write barrier. |
| 2962 | // TODO: consider renaming StoreNeedsWriteBarrier to StoreNeedsGCMark. |
| 2963 | if (needs_write_barrier) { |
| 2964 | locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too. |
| 2965 | locations->AddTemp(Location::RequiresRegister()); |
| 2966 | } else if (generate_volatile) { |
| 2967 |     // ARM encodings have some additional constraints for ldrexd/strexd:
| 2968 | // - registers need to be consecutive |
| 2969 | // - the first register should be even but not R14. |
| 2970 | // We don't test for ARM yet, and the assertion makes sure that we |
| 2971 | // revisit this if we ever enable ARM encoding. |
| 2972 | DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet()); |
| 2973 | |
| 2974 | locations->AddTemp(Location::RequiresRegister()); |
| 2975 | locations->AddTemp(Location::RequiresRegister()); |
| 2976 | if (field_type == Primitive::kPrimDouble) { |
| 2977 | // For doubles we need two more registers to copy the value. |
| 2978 | locations->AddTemp(LocationFrom(r2)); |
| 2979 | locations->AddTemp(LocationFrom(r3)); |
| 2980 | } |
| 2981 | } |
| 2982 | } |
| 2983 | |
| 2984 | void InstructionCodeGeneratorARMVIXL::HandleFieldSet(HInstruction* instruction, |
| 2985 | const FieldInfo& field_info, |
| 2986 | bool value_can_be_null) { |
| 2987 | DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet()); |
| 2988 | |
| 2989 | LocationSummary* locations = instruction->GetLocations(); |
| 2990 | vixl32::Register base = InputRegisterAt(instruction, 0); |
| 2991 | Location value = locations->InAt(1); |
| 2992 | |
| 2993 | bool is_volatile = field_info.IsVolatile(); |
| 2994 | bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd(); |
| 2995 | Primitive::Type field_type = field_info.GetFieldType(); |
| 2996 | uint32_t offset = field_info.GetFieldOffset().Uint32Value(); |
| 2997 | bool needs_write_barrier = |
| 2998 | CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1)); |
| 2999 | |
| 3000 | if (is_volatile) { |
| 3001 | codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore); |
| 3002 | } |
| 3003 | |
| 3004 | switch (field_type) { |
| 3005 | case Primitive::kPrimBoolean: |
| 3006 | case Primitive::kPrimByte: { |
| 3007 | GetAssembler()->StoreToOffset(kStoreByte, RegisterFrom(value), base, offset); |
| 3008 | break; |
| 3009 | } |
| 3010 | |
| 3011 | case Primitive::kPrimShort: |
| 3012 | case Primitive::kPrimChar: { |
| 3013 | GetAssembler()->StoreToOffset(kStoreHalfword, RegisterFrom(value), base, offset); |
| 3014 | break; |
| 3015 | } |
| 3016 | |
| 3017 | case Primitive::kPrimInt: |
| 3018 | case Primitive::kPrimNot: { |
| 3019 | if (kPoisonHeapReferences && needs_write_barrier) { |
| 3020 | // Note that in the case where `value` is a null reference, |
| 3021 | // we do not enter this block, as a null reference does not |
| 3022 | // need poisoning. |
| 3023 | DCHECK_EQ(field_type, Primitive::kPrimNot); |
| 3024 | vixl32::Register temp = RegisterFrom(locations->GetTemp(0)); |
| 3025 | __ Mov(temp, RegisterFrom(value)); |
| 3026 | GetAssembler()->PoisonHeapReference(temp); |
| 3027 | GetAssembler()->StoreToOffset(kStoreWord, temp, base, offset); |
| 3028 | } else { |
| 3029 | GetAssembler()->StoreToOffset(kStoreWord, RegisterFrom(value), base, offset); |
| 3030 | } |
| 3031 | break; |
| 3032 | } |
| 3033 | |
| 3034 | case Primitive::kPrimLong: { |
| 3035 | if (is_volatile && !atomic_ldrd_strd) { |
| 3036 | GenerateWideAtomicStore(base, |
| 3037 | offset, |
| 3038 | LowRegisterFrom(value), |
| 3039 | HighRegisterFrom(value), |
| 3040 | RegisterFrom(locations->GetTemp(0)), |
| 3041 | RegisterFrom(locations->GetTemp(1)), |
| 3042 | instruction); |
| 3043 | } else { |
| 3044 | GetAssembler()->StoreToOffset(kStoreWordPair, LowRegisterFrom(value), base, offset); |
| 3045 | codegen_->MaybeRecordImplicitNullCheck(instruction); |
| 3046 | } |
| 3047 | break; |
| 3048 | } |
| 3049 | |
| 3050 | case Primitive::kPrimFloat: { |
| 3051 | GetAssembler()->StoreSToOffset(SRegisterFrom(value), base, offset); |
| 3052 | break; |
| 3053 | } |
| 3054 | |
| 3055 | case Primitive::kPrimDouble: { |
| 3056 | vixl32::DRegister value_reg = FromLowSToD(LowSRegisterFrom(value)); |
| 3057 | if (is_volatile && !atomic_ldrd_strd) { |
| 3058 | vixl32::Register value_reg_lo = RegisterFrom(locations->GetTemp(0)); |
| 3059 | vixl32::Register value_reg_hi = RegisterFrom(locations->GetTemp(1)); |
| 3060 | |
| 3061 | __ Vmov(value_reg_lo, value_reg_hi, value_reg); |
| 3062 | |
| 3063 | GenerateWideAtomicStore(base, |
| 3064 | offset, |
| 3065 | value_reg_lo, |
| 3066 | value_reg_hi, |
| 3067 | RegisterFrom(locations->GetTemp(2)), |
| 3068 | RegisterFrom(locations->GetTemp(3)), |
| 3069 | instruction); |
| 3070 | } else { |
| 3071 | GetAssembler()->StoreDToOffset(value_reg, base, offset); |
| 3072 | codegen_->MaybeRecordImplicitNullCheck(instruction); |
| 3073 | } |
| 3074 | break; |
| 3075 | } |
| 3076 | |
| 3077 | case Primitive::kPrimVoid: |
| 3078 | LOG(FATAL) << "Unreachable type " << field_type; |
| 3079 | UNREACHABLE(); |
| 3080 | } |
| 3081 | |
| 3082 | // Longs and doubles are handled in the switch. |
| 3083 | if (field_type != Primitive::kPrimLong && field_type != Primitive::kPrimDouble) { |
| 3084 | codegen_->MaybeRecordImplicitNullCheck(instruction); |
| 3085 | } |
| 3086 | |
| 3087 | if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) { |
| 3088 | vixl32::Register temp = RegisterFrom(locations->GetTemp(0)); |
| 3089 | vixl32::Register card = RegisterFrom(locations->GetTemp(1)); |
| 3090 | codegen_->MarkGCCard(temp, card, base, RegisterFrom(value), value_can_be_null); |
| 3091 | } |
| 3092 | |
| 3093 | if (is_volatile) { |
| 3094 | codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny); |
| 3095 | } |
| 3096 | } |
| 3097 | |
Artem Serov | 02d3783 | 2016-10-25 15:25:33 +0100 | [diff] [blame^] | 3098 | void LocationsBuilderARMVIXL::HandleFieldGet(HInstruction* instruction, |
| 3099 | const FieldInfo& field_info) { |
| 3100 | DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet()); |
| 3101 | |
| 3102 | bool object_field_get_with_read_barrier = |
| 3103 | kEmitCompilerReadBarrier && (field_info.GetFieldType() == Primitive::kPrimNot); |
| 3104 | LocationSummary* locations = |
| 3105 | new (GetGraph()->GetArena()) LocationSummary(instruction, |
| 3106 | object_field_get_with_read_barrier ? |
| 3107 | LocationSummary::kCallOnSlowPath : |
| 3108 | LocationSummary::kNoCall); |
| 3109 | if (object_field_get_with_read_barrier && kUseBakerReadBarrier) { |
| 3110 | locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers. |
| 3111 | } |
| 3112 | locations->SetInAt(0, Location::RequiresRegister()); |
| 3113 | |
| 3114 | bool volatile_for_double = field_info.IsVolatile() |
| 3115 | && (field_info.GetFieldType() == Primitive::kPrimDouble) |
| 3116 | && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd(); |
| 3117 | // The output overlaps in case of volatile long: we don't want the |
| 3118 | // code generated by GenerateWideAtomicLoad to overwrite the |
| 3119 | // object's location. Likewise, in the case of an object field get |
| 3120 | // with read barriers enabled, we do not want the load to overwrite |
| 3121 | // the object's location, as we need it to emit the read barrier. |
| 3122 | bool overlap = (field_info.IsVolatile() && (field_info.GetFieldType() == Primitive::kPrimLong)) || |
| 3123 | object_field_get_with_read_barrier; |
| 3124 | |
| 3125 | if (Primitive::IsFloatingPointType(instruction->GetType())) { |
| 3126 | locations->SetOut(Location::RequiresFpuRegister()); |
| 3127 | } else { |
| 3128 | locations->SetOut(Location::RequiresRegister(), |
| 3129 | (overlap ? Location::kOutputOverlap : Location::kNoOutputOverlap)); |
| 3130 | } |
| 3131 | if (volatile_for_double) { |
| 3132 |     // ARM encodings have some additional constraints for ldrexd/strexd:
| 3133 | // - registers need to be consecutive |
| 3134 | // - the first register should be even but not R14. |
| 3135 | // We don't test for ARM yet, and the assertion makes sure that we |
| 3136 | // revisit this if we ever enable ARM encoding. |
| 3137 | DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet()); |
| 3138 | locations->AddTemp(Location::RequiresRegister()); |
| 3139 | locations->AddTemp(Location::RequiresRegister()); |
| 3140 | } else if (object_field_get_with_read_barrier && kUseBakerReadBarrier) { |
| 3141 | // We need a temporary register for the read barrier marking slow |
| 3142 | // path in CodeGeneratorARM::GenerateFieldLoadWithBakerReadBarrier. |
| 3143 | locations->AddTemp(Location::RequiresRegister()); |
| 3144 | } |
| 3145 | } |
| 3146 | |
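|      | // A floating-point compare against zero can use the immediate #0.0 form of vcmp
|      | // (and -0.0 compares equal to +0.0), so an arithmetic-zero constant does not need
|      | // to be materialized in an FPU register.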
| 3147 | Location LocationsBuilderARMVIXL::ArithmeticZeroOrFpuRegister(HInstruction* input) { |
| 3148 | DCHECK(Primitive::IsFloatingPointType(input->GetType())) << input->GetType(); |
| 3149 | if ((input->IsFloatConstant() && (input->AsFloatConstant()->IsArithmeticZero())) || |
| 3150 | (input->IsDoubleConstant() && (input->AsDoubleConstant()->IsArithmeticZero()))) { |
| 3151 | return Location::ConstantLocation(input->AsConstant()); |
| 3152 | } else { |
| 3153 | return Location::RequiresFpuRegister(); |
| 3154 | } |
| 3155 | } |
| 3156 | |
Artem Serov | 02109dd | 2016-09-23 17:17:54 +0100 | [diff] [blame] | 3157 | Location LocationsBuilderARMVIXL::ArmEncodableConstantOrRegister(HInstruction* constant, |
| 3158 | Opcode opcode) { |
| 3159 | DCHECK(!Primitive::IsFloatingPointType(constant->GetType())); |
| 3160 | if (constant->IsConstant() && |
| 3161 | CanEncodeConstantAsImmediate(constant->AsConstant(), opcode)) { |
| 3162 | return Location::ConstantLocation(constant->AsConstant()); |
| 3163 | } |
| 3164 | return Location::RequiresRegister(); |
| 3165 | } |
| 3166 | |
| 3167 | bool LocationsBuilderARMVIXL::CanEncodeConstantAsImmediate(HConstant* input_cst, |
| 3168 | Opcode opcode) { |
| 3169 | uint64_t value = static_cast<uint64_t>(Int64FromConstant(input_cst)); |
| 3170 | if (Primitive::Is64BitType(input_cst->GetType())) { |
| 3171 | Opcode high_opcode = opcode; |
| 3172 | SetCc low_set_cc = kCcDontCare; |
| 3173 | switch (opcode) { |
| 3174 | case SUB: |
| 3175 | // Flip the operation to an ADD. |
| 3176 | value = -value; |
| 3177 | opcode = ADD; |
| 3178 | FALLTHROUGH_INTENDED; |
| 3179 | case ADD: |
| 3180 | if (Low32Bits(value) == 0u) { |
| 3181 | return CanEncodeConstantAsImmediate(High32Bits(value), opcode, kCcDontCare); |
| 3182 | } |
| 3183 | high_opcode = ADC; |
| 3184 | low_set_cc = kCcSet; |
| 3185 | break; |
| 3186 | default: |
| 3187 | break; |
| 3188 | } |
| 3189 | return CanEncodeConstantAsImmediate(Low32Bits(value), opcode, low_set_cc) && |
| 3190 | CanEncodeConstantAsImmediate(High32Bits(value), high_opcode, kCcDontCare); |
| 3191 | } else { |
| 3192 | return CanEncodeConstantAsImmediate(Low32Bits(value), opcode); |
| 3193 | } |
| 3194 | } |
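|      | 
|      | // For example (illustration only): a 64-bit ADD of a constant is generally emitted
|      | // as an ADDS of Low32Bits(value) followed by an ADC of High32Bits(value), so both
|      | // halves must fit the corresponding immediate forms; if the low half is zero, a
|      | // single ADD of the high half suffices.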
| 3195 | |
| 3196 | // TODO(VIXL): Replace `art::arm::SetCc` with `vixl32::FlagsUpdate` once the flags-set
| 3197 | // optimization is enabled.
| 3198 | bool LocationsBuilderARMVIXL::CanEncodeConstantAsImmediate(uint32_t value, |
| 3199 | Opcode opcode, |
| 3200 | SetCc set_cc) { |
| 3201 | ArmVIXLAssembler* assembler = codegen_->GetAssembler(); |
| 3202 | if (assembler->ShifterOperandCanHold(opcode, value, set_cc)) { |
| 3203 | return true; |
| 3204 | } |
| 3205 | Opcode neg_opcode = kNoOperand; |
| 3206 | switch (opcode) { |
| 3207 | case AND: neg_opcode = BIC; value = ~value; break; |
| 3208 | case ORR: neg_opcode = ORN; value = ~value; break; |
| 3209 | case ADD: neg_opcode = SUB; value = -value; break; |
| 3210 | case ADC: neg_opcode = SBC; value = ~value; break; |
| 3211 | case SUB: neg_opcode = ADD; value = -value; break; |
| 3212 | case SBC: neg_opcode = ADC; value = ~value; break; |
| 3213 | default: |
| 3214 | return false; |
| 3215 | } |
| 3216 | return assembler->ShifterOperandCanHold(neg_opcode, value, set_cc); |
| 3217 | } |
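|      | 
|      | // For example (illustration only): AND with 0xFFFFFF00 has no modified-immediate
|      | // encoding, but the complemented constant 0xFF does, so the operation can be
|      | // emitted as BIC with #0xFF and this predicate returns true.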
| 3218 | |
Scott Wakeling | a7812ae | 2016-10-17 10:03:36 +0100 | [diff] [blame] | 3219 | void InstructionCodeGeneratorARMVIXL::HandleFieldGet(HInstruction* instruction, |
| 3220 | const FieldInfo& field_info) { |
| 3221 | DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet()); |
| 3222 | |
| 3223 | LocationSummary* locations = instruction->GetLocations(); |
| 3224 | vixl32::Register base = InputRegisterAt(instruction, 0); |
| 3225 | Location out = locations->Out(); |
| 3226 | bool is_volatile = field_info.IsVolatile(); |
| 3227 | bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd(); |
| 3228 | Primitive::Type field_type = field_info.GetFieldType(); |
| 3229 | uint32_t offset = field_info.GetFieldOffset().Uint32Value(); |
| 3230 | |
| 3231 | switch (field_type) { |
| 3232 | case Primitive::kPrimBoolean: |
| 3233 | GetAssembler()->LoadFromOffset(kLoadUnsignedByte, RegisterFrom(out), base, offset); |
| 3234 | break; |
| 3235 | |
| 3236 | case Primitive::kPrimByte: |
| 3237 | GetAssembler()->LoadFromOffset(kLoadSignedByte, RegisterFrom(out), base, offset); |
| 3238 | break; |
| 3239 | |
| 3240 | case Primitive::kPrimShort: |
| 3241 | GetAssembler()->LoadFromOffset(kLoadSignedHalfword, RegisterFrom(out), base, offset); |
| 3242 | break; |
| 3243 | |
| 3244 | case Primitive::kPrimChar: |
| 3245 | GetAssembler()->LoadFromOffset(kLoadUnsignedHalfword, RegisterFrom(out), base, offset); |
| 3246 | break; |
| 3247 | |
| 3248 | case Primitive::kPrimInt: |
| 3249 | GetAssembler()->LoadFromOffset(kLoadWord, RegisterFrom(out), base, offset); |
| 3250 | break; |
| 3251 | |
| 3252 | case Primitive::kPrimNot: { |
| 3253 | // /* HeapReference<Object> */ out = *(base + offset) |
| 3254 | if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) { |
| 3255 | TODO_VIXL32(FATAL); |
| 3256 | } else { |
| 3257 | GetAssembler()->LoadFromOffset(kLoadWord, RegisterFrom(out), base, offset); |
| 3258 | // TODO(VIXL): Scope to guarantee the position immediately after the load. |
| 3259 | codegen_->MaybeRecordImplicitNullCheck(instruction); |
| 3260 | if (is_volatile) { |
| 3261 | codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny); |
| 3262 | } |
| 3263 | // If read barriers are enabled, emit read barriers other than |
| 3264 | // Baker's using a slow path (and also unpoison the loaded |
| 3265 | // reference, if heap poisoning is enabled). |
| 3266 | codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, locations->InAt(0), offset); |
| 3267 | } |
| 3268 | break; |
| 3269 | } |
| 3270 | |
| 3271 | case Primitive::kPrimLong: |
| 3272 | if (is_volatile && !atomic_ldrd_strd) { |
| 3273 | GenerateWideAtomicLoad(base, offset, LowRegisterFrom(out), HighRegisterFrom(out)); |
| 3274 | } else { |
| 3275 | GetAssembler()->LoadFromOffset(kLoadWordPair, LowRegisterFrom(out), base, offset); |
| 3276 | } |
| 3277 | break; |
| 3278 | |
| 3279 | case Primitive::kPrimFloat: |
| 3280 | GetAssembler()->LoadSFromOffset(SRegisterFrom(out), base, offset); |
| 3281 | break; |
| 3282 | |
| 3283 | case Primitive::kPrimDouble: { |
| 3284 | vixl32::DRegister out_dreg = FromLowSToD(LowSRegisterFrom(out)); |
| 3285 | if (is_volatile && !atomic_ldrd_strd) { |
| 3286 | vixl32::Register lo = RegisterFrom(locations->GetTemp(0)); |
| 3287 | vixl32::Register hi = RegisterFrom(locations->GetTemp(1)); |
| 3288 | GenerateWideAtomicLoad(base, offset, lo, hi); |
| 3289 |         // TODO(VIXL): Do we need to be immediately after the ldrexd instruction? If so,
| 3290 |         // we need a scope.
| 3291 | codegen_->MaybeRecordImplicitNullCheck(instruction); |
| 3292 | __ Vmov(out_dreg, lo, hi); |
| 3293 | } else { |
| 3294 | GetAssembler()->LoadDFromOffset(out_dreg, base, offset); |
| 3295 | // TODO(VIXL): Scope to guarantee the position immediately after the load. |
| 3296 | codegen_->MaybeRecordImplicitNullCheck(instruction); |
| 3297 | } |
| 3298 | break; |
| 3299 | } |
| 3300 | |
| 3301 | case Primitive::kPrimVoid: |
| 3302 | LOG(FATAL) << "Unreachable type " << field_type; |
| 3303 | UNREACHABLE(); |
| 3304 | } |
| 3305 | |
| 3306 | if (field_type == Primitive::kPrimNot || field_type == Primitive::kPrimDouble) { |
| 3307 | // Potential implicit null checks, in the case of reference or |
| 3308 | // double fields, are handled in the previous switch statement. |
| 3309 | } else { |
| 3310 | // Address cases other than reference and double that may require an implicit null check. |
| 3311 | codegen_->MaybeRecordImplicitNullCheck(instruction); |
| 3312 | } |
| 3313 | |
| 3314 | if (is_volatile) { |
| 3315 | if (field_type == Primitive::kPrimNot) { |
| 3316 | // Memory barriers, in the case of references, are also handled |
| 3317 | // in the previous switch statement. |
| 3318 | } else { |
| 3319 | codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny); |
| 3320 | } |
| 3321 | } |
| 3322 | } |
| 3323 | |
| 3324 | void LocationsBuilderARMVIXL::VisitInstanceFieldSet(HInstanceFieldSet* instruction) { |
| 3325 | HandleFieldSet(instruction, instruction->GetFieldInfo()); |
| 3326 | } |
| 3327 | |
| 3328 | void InstructionCodeGeneratorARMVIXL::VisitInstanceFieldSet(HInstanceFieldSet* instruction) { |
| 3329 | HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull()); |
| 3330 | } |
| 3331 | |
| 3332 | void LocationsBuilderARMVIXL::VisitInstanceFieldGet(HInstanceFieldGet* instruction) { |
| 3333 | HandleFieldGet(instruction, instruction->GetFieldInfo()); |
| 3334 | } |
| 3335 | |
| 3336 | void InstructionCodeGeneratorARMVIXL::VisitInstanceFieldGet(HInstanceFieldGet* instruction) { |
| 3337 | HandleFieldGet(instruction, instruction->GetFieldInfo()); |
| 3338 | } |
| 3339 | |
| 3340 | void LocationsBuilderARMVIXL::VisitStaticFieldGet(HStaticFieldGet* instruction) { |
| 3341 | HandleFieldGet(instruction, instruction->GetFieldInfo()); |
| 3342 | } |
| 3343 | |
| 3344 | void InstructionCodeGeneratorARMVIXL::VisitStaticFieldGet(HStaticFieldGet* instruction) { |
| 3345 | HandleFieldGet(instruction, instruction->GetFieldInfo()); |
| 3346 | } |
| 3347 | |
| 3348 | void LocationsBuilderARMVIXL::VisitNullCheck(HNullCheck* instruction) { |
| 3349 | // TODO(VIXL): https://android-review.googlesource.com/#/c/275337/ |
| 3350 | LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock() |
| 3351 | ? LocationSummary::kCallOnSlowPath |
| 3352 | : LocationSummary::kNoCall; |
| 3353 | LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind); |
| 3354 | locations->SetInAt(0, Location::RequiresRegister()); |
| 3355 | if (instruction->HasUses()) { |
| 3356 | locations->SetOut(Location::SameAsFirstInput()); |
| 3357 | } |
| 3358 | } |
| 3359 | |
| 3360 | void CodeGeneratorARMVIXL::GenerateImplicitNullCheck(HNullCheck* instruction) { |
| 3361 | if (CanMoveNullCheckToUser(instruction)) { |
| 3362 | return; |
| 3363 | } |
| 3364 | |
| 3365 | UseScratchRegisterScope temps(GetVIXLAssembler()); |
| 3366 | AssemblerAccurateScope aas(GetVIXLAssembler(), |
| 3367 | kArmInstrMaxSizeInBytes, |
| 3368 | CodeBufferCheckScope::kMaximumSize); |
| 3369 | __ ldr(temps.Acquire(), MemOperand(InputRegisterAt(instruction, 0))); |
| 3370 | RecordPcInfo(instruction, instruction->GetDexPc()); |
| 3371 | } |
| 3372 | |
| 3373 | void CodeGeneratorARMVIXL::GenerateExplicitNullCheck(HNullCheck* instruction) { |
| 3374 | NullCheckSlowPathARMVIXL* slow_path = |
| 3375 | new (GetGraph()->GetArena()) NullCheckSlowPathARMVIXL(instruction); |
| 3376 | AddSlowPath(slow_path); |
| 3377 | __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel()); |
| 3378 | } |
| 3379 | |
| 3380 | void InstructionCodeGeneratorARMVIXL::VisitNullCheck(HNullCheck* instruction) { |
| 3381 | codegen_->GenerateNullCheck(instruction); |
| 3382 | } |
| 3383 | |
| 3384 | void LocationsBuilderARMVIXL::VisitArrayLength(HArrayLength* instruction) { |
| 3385 | LocationSummary* locations = |
| 3386 | new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); |
| 3387 | locations->SetInAt(0, Location::RequiresRegister()); |
| 3388 | locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); |
| 3389 | } |
| 3390 | |
| 3391 | void InstructionCodeGeneratorARMVIXL::VisitArrayLength(HArrayLength* instruction) { |
| 3392 | uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction); |
| 3393 | vixl32::Register obj = InputRegisterAt(instruction, 0); |
| 3394 | vixl32::Register out = OutputRegister(instruction); |
| 3395 | GetAssembler()->LoadFromOffset(kLoadWord, out, obj, offset); |
| 3396 | codegen_->MaybeRecordImplicitNullCheck(instruction); |
| 3397 | // TODO(VIXL): https://android-review.googlesource.com/#/c/272625/ |
| 3398 | } |
| 3399 | |
| 3400 | void CodeGeneratorARMVIXL::MarkGCCard(vixl32::Register temp, |
| 3401 | vixl32::Register card, |
| 3402 | vixl32::Register object, |
| 3403 | vixl32::Register value, |
| 3404 | bool can_be_null) { |
| 3405 | vixl32::Label is_null; |
| 3406 | if (can_be_null) { |
| 3407 | __ Cbz(value, &is_null); |
| 3408 | } |
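|      |   // Dirty the card covering `object`: the card index is `object` shifted right by
|      |   // kCardShift, and the byte stored is the low byte of the card-table base itself,
|      |   // which ART biases so that it equals the dirty-card value (saving a constant load).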
| 3409 | GetAssembler()->LoadFromOffset( |
| 3410 | kLoadWord, card, tr, Thread::CardTableOffset<kArmPointerSize>().Int32Value()); |
| 3411 | __ Lsr(temp, object, gc::accounting::CardTable::kCardShift); |
| 3412 | __ Strb(card, MemOperand(card, temp)); |
| 3413 | if (can_be_null) { |
| 3414 | __ Bind(&is_null); |
| 3415 | } |
| 3416 | } |
| 3417 | |
Scott Wakeling | fe88546 | 2016-09-22 10:24:38 +0100 | [diff] [blame] | 3418 | void LocationsBuilderARMVIXL::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) { |
| 3419 | LOG(FATAL) << "Unreachable"; |
| 3420 | } |
| 3421 | |
| 3422 | void InstructionCodeGeneratorARMVIXL::VisitParallelMove(HParallelMove* instruction) { |
| 3423 | codegen_->GetMoveResolver()->EmitNativeCode(instruction); |
| 3424 | } |
| 3425 | |
Scott Wakeling | a7812ae | 2016-10-17 10:03:36 +0100 | [diff] [blame] | 3426 | void LocationsBuilderARMVIXL::VisitSuspendCheck(HSuspendCheck* instruction) { |
| 3427 | new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath); |
| 3428 | // TODO(VIXL): https://android-review.googlesource.com/#/c/275337/ and related. |
| 3429 | } |
| 3430 | |
| 3431 | void InstructionCodeGeneratorARMVIXL::VisitSuspendCheck(HSuspendCheck* instruction) { |
| 3432 | HBasicBlock* block = instruction->GetBlock(); |
| 3433 | if (block->GetLoopInformation() != nullptr) { |
| 3434 | DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction); |
| 3435 | // The back edge will generate the suspend check. |
| 3436 | return; |
| 3437 | } |
| 3438 | if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) { |
| 3439 | // The goto will generate the suspend check. |
| 3440 | return; |
| 3441 | } |
| 3442 | GenerateSuspendCheck(instruction, nullptr); |
| 3443 | } |
| 3444 | |
| 3445 | void InstructionCodeGeneratorARMVIXL::GenerateSuspendCheck(HSuspendCheck* instruction, |
| 3446 | HBasicBlock* successor) { |
| 3447 | SuspendCheckSlowPathARMVIXL* slow_path = |
| 3448 | down_cast<SuspendCheckSlowPathARMVIXL*>(instruction->GetSlowPath()); |
| 3449 | if (slow_path == nullptr) { |
| 3450 | slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARMVIXL(instruction, successor); |
| 3451 | instruction->SetSlowPath(slow_path); |
| 3452 | codegen_->AddSlowPath(slow_path); |
| 3453 | if (successor != nullptr) { |
| 3454 | DCHECK(successor->IsLoopHeader()); |
| 3455 | codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction); |
| 3456 | } |
| 3457 | } else { |
| 3458 | DCHECK_EQ(slow_path->GetSuccessor(), successor); |
| 3459 | } |
| 3460 | |
| 3461 | UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler()); |
| 3462 | vixl32::Register temp = temps.Acquire(); |
| 3463 | GetAssembler()->LoadFromOffset( |
| 3464 | kLoadUnsignedHalfword, temp, tr, Thread::ThreadFlagsOffset<kArmPointerSize>().Int32Value()); |
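|      |   // `temp` now holds the 16-bit thread flags; if any flag is set (e.g. a pending
|      |   // suspend or checkpoint request), take the slow path.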
| 3465 | if (successor == nullptr) { |
| 3466 | __ Cbnz(temp, slow_path->GetEntryLabel()); |
| 3467 | __ Bind(slow_path->GetReturnLabel()); |
| 3468 | } else { |
| 3469 | __ Cbz(temp, codegen_->GetLabelOf(successor)); |
| 3470 | __ B(slow_path->GetEntryLabel()); |
| 3471 | } |
| 3472 | } |
| 3473 | |
Scott Wakeling | fe88546 | 2016-09-22 10:24:38 +0100 | [diff] [blame] | 3474 | ArmVIXLAssembler* ParallelMoveResolverARMVIXL::GetAssembler() const { |
| 3475 | return codegen_->GetAssembler(); |
| 3476 | } |
| 3477 | |
| 3478 | void ParallelMoveResolverARMVIXL::EmitMove(size_t index) { |
Scott Wakeling | a7812ae | 2016-10-17 10:03:36 +0100 | [diff] [blame] | 3479 | UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler()); |
Scott Wakeling | fe88546 | 2016-09-22 10:24:38 +0100 | [diff] [blame] | 3480 | MoveOperands* move = moves_[index]; |
| 3481 | Location source = move->GetSource(); |
| 3482 | Location destination = move->GetDestination(); |
| 3483 | |
| 3484 | if (source.IsRegister()) { |
| 3485 | if (destination.IsRegister()) { |
Scott Wakeling | a7812ae | 2016-10-17 10:03:36 +0100 | [diff] [blame] | 3486 | __ Mov(RegisterFrom(destination), RegisterFrom(source)); |
Scott Wakeling | fe88546 | 2016-09-22 10:24:38 +0100 | [diff] [blame] | 3487 | } else if (destination.IsFpuRegister()) { |
Scott Wakeling | a7812ae | 2016-10-17 10:03:36 +0100 | [diff] [blame] | 3488 | __ Vmov(SRegisterFrom(destination), RegisterFrom(source)); |
Scott Wakeling | fe88546 | 2016-09-22 10:24:38 +0100 | [diff] [blame] | 3489 | } else { |
| 3490 | DCHECK(destination.IsStackSlot()); |
| 3491 | GetAssembler()->StoreToOffset(kStoreWord, |
Scott Wakeling | a7812ae | 2016-10-17 10:03:36 +0100 | [diff] [blame] | 3492 | RegisterFrom(source), |
Scott Wakeling | fe88546 | 2016-09-22 10:24:38 +0100 | [diff] [blame] | 3493 | sp, |
| 3494 | destination.GetStackIndex()); |
| 3495 | } |
| 3496 | } else if (source.IsStackSlot()) { |
Scott Wakeling | a7812ae | 2016-10-17 10:03:36 +0100 | [diff] [blame] | 3497 | if (destination.IsRegister()) { |
      GetAssembler()->LoadFromOffset(kLoadWord,
                                     RegisterFrom(destination),
                                     sp,
                                     source.GetStackIndex());
    } else if (destination.IsFpuRegister()) {
      GetAssembler()->LoadSFromOffset(SRegisterFrom(destination), sp, source.GetStackIndex());
    } else {
      DCHECK(destination.IsStackSlot());
      vixl32::Register temp = temps.Acquire();
      GetAssembler()->LoadFromOffset(kLoadWord, temp, sp, source.GetStackIndex());
      GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
    }
  } else if (source.IsFpuRegister()) {
    if (destination.IsRegister()) {
      TODO_VIXL32(FATAL);
    } else if (destination.IsFpuRegister()) {
      __ Vmov(SRegisterFrom(destination), SRegisterFrom(source));
    } else {
      DCHECK(destination.IsStackSlot());
      GetAssembler()->StoreSToOffset(SRegisterFrom(source), sp, destination.GetStackIndex());
    }
  } else if (source.IsDoubleStackSlot()) {
    if (destination.IsDoubleStackSlot()) {
      vixl32::DRegister temp = temps.AcquireD();
      GetAssembler()->LoadDFromOffset(temp, sp, source.GetStackIndex());
      GetAssembler()->StoreDToOffset(temp, sp, destination.GetStackIndex());
    } else if (destination.IsRegisterPair()) {
      DCHECK(ExpectedPairLayout(destination));
      GetAssembler()->LoadFromOffset(
          kLoadWordPair, LowRegisterFrom(destination), sp, source.GetStackIndex());
    } else {
      DCHECK(destination.IsFpuRegisterPair()) << destination;
      GetAssembler()->LoadDFromOffset(DRegisterFrom(destination), sp, source.GetStackIndex());
    }
  } else if (source.IsRegisterPair()) {
    if (destination.IsRegisterPair()) {
      __ Mov(LowRegisterFrom(destination), LowRegisterFrom(source));
      __ Mov(HighRegisterFrom(destination), HighRegisterFrom(source));
    } else if (destination.IsFpuRegisterPair()) {
      __ Vmov(FromLowSToD(LowSRegisterFrom(destination)),
              LowRegisterFrom(source),
              HighRegisterFrom(source));
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      DCHECK(ExpectedPairLayout(source));
      GetAssembler()->StoreToOffset(kStoreWordPair,
                                    LowRegisterFrom(source),
                                    sp,
                                    destination.GetStackIndex());
    }
  } else if (source.IsFpuRegisterPair()) {
    if (destination.IsRegisterPair()) {
      TODO_VIXL32(FATAL);
    } else if (destination.IsFpuRegisterPair()) {
      __ Vmov(DRegisterFrom(destination), DRegisterFrom(source));
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      GetAssembler()->StoreDToOffset(DRegisterFrom(source), sp, destination.GetStackIndex());
    }
  } else {
    DCHECK(source.IsConstant()) << source;
    HConstant* constant = source.GetConstant();
    if (constant->IsIntConstant() || constant->IsNullConstant()) {
      int32_t value = CodeGenerator::GetInt32ValueOf(constant);
      if (destination.IsRegister()) {
        __ Mov(RegisterFrom(destination), value);
      } else {
        DCHECK(destination.IsStackSlot());
        vixl32::Register temp = temps.Acquire();
        __ Mov(temp, value);
        GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
      }
    } else if (constant->IsLongConstant()) {
      int64_t value = constant->AsLongConstant()->GetValue();
      if (destination.IsRegisterPair()) {
        __ Mov(LowRegisterFrom(destination), Low32Bits(value));
        __ Mov(HighRegisterFrom(destination), High32Bits(value));
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        vixl32::Register temp = temps.Acquire();
        __ Mov(temp, Low32Bits(value));
        GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
        __ Mov(temp, High32Bits(value));
        GetAssembler()->StoreToOffset(kStoreWord,
                                      temp,
                                      sp,
                                      destination.GetHighStackIndex(kArmWordSize));
      }
    } else if (constant->IsDoubleConstant()) {
      double value = constant->AsDoubleConstant()->GetValue();
      if (destination.IsFpuRegisterPair()) {
        __ Vmov(FromLowSToD(LowSRegisterFrom(destination)), value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        uint64_t int_value = bit_cast<uint64_t, double>(value);
        vixl32::Register temp = temps.Acquire();
        __ Mov(temp, Low32Bits(int_value));
        GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
        __ Mov(temp, High32Bits(int_value));
        GetAssembler()->StoreToOffset(kStoreWord,
                                      temp,
                                      sp,
                                      destination.GetHighStackIndex(kArmWordSize));
      }
    } else {
      DCHECK(constant->IsFloatConstant()) << constant->DebugName();
      float value = constant->AsFloatConstant()->GetValue();
      if (destination.IsFpuRegister()) {
        __ Vmov(SRegisterFrom(destination), value);
      } else {
        DCHECK(destination.IsStackSlot());
        vixl32::Register temp = temps.Acquire();
        __ Mov(temp, bit_cast<int32_t, float>(value));
        GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
      }
    }
  }
}

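// Swap the content of core register `reg` with the stack slot at offset `mem`,
// going through a scratch core register.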
void ParallelMoveResolverARMVIXL::Exchange(vixl32::Register reg, int mem) {
  UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler());
  vixl32::Register temp = temps.Acquire();
  __ Mov(temp, reg);
  GetAssembler()->LoadFromOffset(kLoadWord, reg, sp, mem);
  GetAssembler()->StoreToOffset(kStoreWord, temp, sp, mem);
}

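// Swap the contents of the stack slots at offsets `mem1` and `mem2`, using one
// core and one S scratch register so that no extra stack slot is needed.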
void ParallelMoveResolverARMVIXL::Exchange(int mem1, int mem2) {
  // TODO(VIXL32): Double check the performance of this implementation.
  UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler());
  vixl32::Register temp = temps.Acquire();
  vixl32::SRegister temp_s = temps.AcquireS();

  __ Ldr(temp, MemOperand(sp, mem1));
  __ Vldr(temp_s, MemOperand(sp, mem2));
  __ Str(temp, MemOperand(sp, mem2));
  __ Vstr(temp_s, MemOperand(sp, mem1));
}

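// Emit the swap for the parallel move at `index`. Location pairs that are not
// handled yet abort with TODO_VIXL32(FATAL).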
void ParallelMoveResolverARMVIXL::EmitSwap(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();
  UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler());

  if (source.IsRegister() && destination.IsRegister()) {
    vixl32::Register temp = temps.Acquire();
    DCHECK(!RegisterFrom(source).Is(temp));
    DCHECK(!RegisterFrom(destination).Is(temp));
    __ Mov(temp, RegisterFrom(destination));
    __ Mov(RegisterFrom(destination), RegisterFrom(source));
    __ Mov(RegisterFrom(source), temp);
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange(RegisterFrom(source), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange(RegisterFrom(destination), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    TODO_VIXL32(FATAL);
  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
    TODO_VIXL32(FATAL);
  } else if (source.IsRegisterPair() && destination.IsRegisterPair()) {
    vixl32::DRegister temp = temps.AcquireD();
    __ Vmov(temp, LowRegisterFrom(source), HighRegisterFrom(source));
    __ Mov(LowRegisterFrom(source), LowRegisterFrom(destination));
    __ Mov(HighRegisterFrom(source), HighRegisterFrom(destination));
    __ Vmov(LowRegisterFrom(destination), HighRegisterFrom(destination), temp);
  } else if (source.IsRegisterPair() || destination.IsRegisterPair()) {
    vixl32::Register low_reg = LowRegisterFrom(source.IsRegisterPair() ? source : destination);
    int mem = source.IsRegisterPair() ? destination.GetStackIndex() : source.GetStackIndex();
    DCHECK(ExpectedPairLayout(source.IsRegisterPair() ? source : destination));
    vixl32::DRegister temp = temps.AcquireD();
    __ Vmov(temp, low_reg, vixl32::Register(low_reg.GetCode() + 1));
    GetAssembler()->LoadFromOffset(kLoadWordPair, low_reg, sp, mem);
    GetAssembler()->StoreDToOffset(temp, sp, mem);
  } else if (source.IsFpuRegisterPair() && destination.IsFpuRegisterPair()) {
    TODO_VIXL32(FATAL);
  } else if (source.IsFpuRegisterPair() || destination.IsFpuRegisterPair()) {
    TODO_VIXL32(FATAL);
  } else if (source.IsFpuRegister() || destination.IsFpuRegister()) {
    TODO_VIXL32(FATAL);
  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
    vixl32::DRegister temp1 = temps.AcquireD();
    vixl32::DRegister temp2 = temps.AcquireD();
    __ Vldr(temp1, MemOperand(sp, source.GetStackIndex()));
    __ Vldr(temp2, MemOperand(sp, destination.GetStackIndex()));
    __ Vstr(temp1, MemOperand(sp, destination.GetStackIndex()));
    __ Vstr(temp2, MemOperand(sp, source.GetStackIndex()));
  } else {
    LOG(FATAL) << "Unimplemented " << source << " <-> " << destination;
  }
}

void ParallelMoveResolverARMVIXL::SpillScratch(int reg ATTRIBUTE_UNUSED) {
  TODO_VIXL32(FATAL);
}

void ParallelMoveResolverARMVIXL::RestoreScratch(int reg ATTRIBUTE_UNUSED) {
  TODO_VIXL32(FATAL);
}

// Check if the desired_class_load_kind is supported. If it is, return it;
// otherwise return a fall-back kind that should be used instead.
HLoadClass::LoadKind CodeGeneratorARMVIXL::GetSupportedLoadClassKind(
    HLoadClass::LoadKind desired_class_load_kind ATTRIBUTE_UNUSED) {
  // TODO(VIXL): Implement optimized code paths.
  return HLoadClass::LoadKind::kDexCacheViaMethod;
}

void LocationsBuilderARMVIXL::VisitLoadClass(HLoadClass* cls) {
  if (cls->NeedsAccessCheck()) {
    InvokeRuntimeCallingConventionARMVIXL calling_convention;
    CodeGenerator::CreateLoadClassLocationSummary(
        cls,
        LocationFrom(calling_convention.GetRegisterAt(0)),
        LocationFrom(r0),
        /* code_generator_supports_read_barrier */ true);
    return;
  }

  // TODO(VIXL): read barrier code.
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || kEmitCompilerReadBarrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
      load_kind == HLoadClass::LoadKind::kDexCacheViaMethod ||
      load_kind == HLoadClass::LoadKind::kDexCachePcRelative) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARMVIXL::VisitLoadClass(HLoadClass* cls) {
  LocationSummary* locations = cls->GetLocations();
  if (cls->NeedsAccessCheck()) {
    codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex());
    codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
    CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
    return;
  }

  Location out_loc = locations->Out();
  vixl32::Register out = OutputRegister(cls);

  // TODO(VIXL): read barrier code.
  bool generate_null_check = false;
  switch (cls->GetLoadKind()) {
    case HLoadClass::LoadKind::kReferrersClass: {
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      vixl32::Register current_method = InputRegisterAt(cls, 0);
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              current_method,
                              ArtMethod::DeclaringClassOffset().Int32Value(),
                              kEmitCompilerReadBarrier);
      break;
    }
    case HLoadClass::LoadKind::kDexCacheViaMethod: {
      // /* GcRoot<mirror::Class>[] */ out =
      //     current_method.ptr_sized_fields_->dex_cache_resolved_types_
      vixl32::Register current_method = InputRegisterAt(cls, 0);
      const int32_t resolved_types_offset =
          ArtMethod::DexCacheResolvedTypesOffset(kArmPointerSize).Int32Value();
      GetAssembler()->LoadFromOffset(kLoadWord, out, current_method, resolved_types_offset);
      // /* GcRoot<mirror::Class> */ out = out[type_index]
      size_t offset = CodeGenerator::GetCacheOffset(cls->GetTypeIndex());
      GenerateGcRootFieldLoad(cls, out_loc, out, offset, kEmitCompilerReadBarrier);
      generate_null_check = !cls->IsInDexCache();
      break;
    }
    default:
      TODO_VIXL32(FATAL);
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    LoadClassSlowPathARMVIXL* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARMVIXL(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ Cbz(out, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}

void LocationsBuilderARMVIXL::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARMVIXL::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  LoadClassSlowPathARMVIXL* slow_path =
      new (GetGraph()->GetArena()) LoadClassSlowPathARMVIXL(check->GetLoadClass(),
                                                            check,
                                                            check->GetDexPc(),
                                                            /* do_clinit */ true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
}

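// Branch to `slow_path` if the class held in `class_reg` is not yet initialized.
// On the fast path a memory barrier is emitted, as the class may have been
// initialized by another thread.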
void InstructionCodeGeneratorARMVIXL::GenerateClassInitializationCheck(
    LoadClassSlowPathARMVIXL* slow_path, vixl32::Register class_reg) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  vixl32::Register temp = temps.Acquire();
  GetAssembler()->LoadFromOffset(kLoadWord,
                                 temp,
                                 class_reg,
                                 mirror::Class::StatusOffset().Int32Value());
  __ Cmp(temp, mirror::Class::kStatusInitialized);
  __ B(lt, slow_path->GetEntryLabel());
  // Even if the initialized flag is set, we may be in a situation where caches are not synced
  // properly. Therefore, we do a memory fence.
  __ Dmb(ISH);
  __ Bind(slow_path->GetExitLabel());
}

// Check if the desired_string_load_kind is supported. If it is, return it;
// otherwise return a fall-back kind that should be used instead.
HLoadString::LoadKind CodeGeneratorARMVIXL::GetSupportedLoadStringKind(
    HLoadString::LoadKind desired_string_load_kind ATTRIBUTE_UNUSED) {
  // TODO(VIXL): Implement optimized code paths. For now we always use the simpler fallback code.
  return HLoadString::LoadKind::kDexCacheViaMethod;
}

void LocationsBuilderARMVIXL::VisitLoadString(HLoadString* load) {
  LocationSummary::CallKind call_kind = load->NeedsEnvironment()
      ? LocationSummary::kCallOnMainOnly
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);

  // TODO(VIXL): Implement optimized code paths.
  // See InstructionCodeGeneratorARMVIXL::VisitLoadString.
  HLoadString::LoadKind load_kind = load->GetLoadKind();
  if (load_kind == HLoadString::LoadKind::kDexCacheViaMethod) {
    locations->SetInAt(0, Location::RequiresRegister());
    // TODO(VIXL): Use InvokeRuntimeCallingConventionARMVIXL instead.
    locations->SetOut(LocationFrom(r0));
  } else {
    locations->SetOut(Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARMVIXL::VisitLoadString(HLoadString* load) {
  // TODO(VIXL): Implement optimized code paths.
  // We implemented the simplest solution to get the first ART tests passing. The optimized
  // paths are deferred for now and should be implemented using the ARM64 implementation as
  // a reference. The same applies to LocationsBuilderARMVIXL::VisitLoadString.

  // TODO: Re-add the compiler code to do string dex cache lookup.
  DCHECK_EQ(load->GetLoadKind(), HLoadString::LoadKind::kDexCacheViaMethod);
  InvokeRuntimeCallingConventionARMVIXL calling_convention;
  __ Mov(calling_convention.GetRegisterAt(0), load->GetStringIndex());
  codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}

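// Offset of the pending exception field within the Thread object, addressed
// relative to the thread register (tr).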
static int32_t GetExceptionTlsOffset() {
  return Thread::ExceptionOffset<kArmPointerSize>().Int32Value();
}

void LocationsBuilderARMVIXL::VisitLoadException(HLoadException* load) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARMVIXL::VisitLoadException(HLoadException* load) {
  vixl32::Register out = OutputRegister(load);
  GetAssembler()->LoadFromOffset(kLoadWord, out, tr, GetExceptionTlsOffset());
}

void LocationsBuilderARMVIXL::VisitClearException(HClearException* clear) {
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}

void InstructionCodeGeneratorARMVIXL::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  vixl32::Register temp = temps.Acquire();
  __ Mov(temp, 0);
  GetAssembler()->StoreToOffset(kStoreWord, temp, tr, GetExceptionTlsOffset());
}

void LocationsBuilderARMVIXL::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConventionARMVIXL calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARMVIXL::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}

void LocationsBuilderARMVIXL::VisitAnd(HAnd* instruction) {
  HandleBitwiseOperation(instruction, AND);
}

void LocationsBuilderARMVIXL::VisitOr(HOr* instruction) {
  HandleBitwiseOperation(instruction, ORR);
}

void LocationsBuilderARMVIXL::VisitXor(HXor* instruction) {
  HandleBitwiseOperation(instruction, EOR);
}

void LocationsBuilderARMVIXL::HandleBitwiseOperation(HBinaryOperation* instruction, Opcode opcode) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  DCHECK(instruction->GetResultType() == Primitive::kPrimInt
         || instruction->GetResultType() == Primitive::kPrimLong);
  // Note: GVN reorders commutative operations to have the constant on the right hand side.
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, ArmEncodableConstantOrRegister(instruction->InputAt(1), opcode));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARMVIXL::VisitAnd(HAnd* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorARMVIXL::VisitOr(HOr* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorARMVIXL::VisitXor(HXor* instruction) {
  HandleBitwiseOperation(instruction);
}

// TODO(VIXL): Remove optimizations in the helper when they are implemented in vixl.
void InstructionCodeGeneratorARMVIXL::GenerateAndConst(vixl32::Register out,
                                                       vixl32::Register first,
                                                       uint32_t value) {
  // Optimize special cases for individual halves of `and-long` (`and` is simplified earlier).
  if (value == 0xffffffffu) {
    if (!out.Is(first)) {
      __ Mov(out, first);
    }
    return;
  }
  if (value == 0u) {
    __ Mov(out, 0);
    return;
  }
  if (GetAssembler()->ShifterOperandCanHold(AND, value)) {
    __ And(out, first, value);
  } else {
    DCHECK(GetAssembler()->ShifterOperandCanHold(BIC, ~value));
    __ Bic(out, first, ~value);
  }
}

// TODO(VIXL): Remove optimizations in the helper when they are implemented in vixl.
void InstructionCodeGeneratorARMVIXL::GenerateOrrConst(vixl32::Register out,
                                                       vixl32::Register first,
                                                       uint32_t value) {
  // Optimize special cases for individual halves of `or-long` (`or` is simplified earlier).
  if (value == 0u) {
    if (!out.Is(first)) {
      __ Mov(out, first);
    }
    return;
  }
  if (value == 0xffffffffu) {
    __ Mvn(out, 0);
    return;
  }
  if (GetAssembler()->ShifterOperandCanHold(ORR, value)) {
    __ Orr(out, first, value);
  } else {
    DCHECK(GetAssembler()->ShifterOperandCanHold(ORN, ~value));
    __ Orn(out, first, ~value);
  }
}

// TODO(VIXL): Remove optimizations in the helper when they are implemented in vixl.
void InstructionCodeGeneratorARMVIXL::GenerateEorConst(vixl32::Register out,
                                                       vixl32::Register first,
                                                       uint32_t value) {
  // Optimize the special case for individual halves of `xor-long` (`xor` is simplified earlier).
  if (value == 0u) {
    if (!out.Is(first)) {
      __ Mov(out, first);
    }
    return;
  }
  __ Eor(out, first, value);
}

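// Emit the code for And/Or/Xor. Constant right-hand sides go through the
// Generate*Const helpers above; 64-bit operations are split into low and high halves.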
void InstructionCodeGeneratorARMVIXL::HandleBitwiseOperation(HBinaryOperation* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  if (second.IsConstant()) {
    uint64_t value = static_cast<uint64_t>(Int64FromConstant(second.GetConstant()));
    uint32_t value_low = Low32Bits(value);
    if (instruction->GetResultType() == Primitive::kPrimInt) {
      vixl32::Register first_reg = InputRegisterAt(instruction, 0);
      vixl32::Register out_reg = OutputRegister(instruction);
      if (instruction->IsAnd()) {
        GenerateAndConst(out_reg, first_reg, value_low);
      } else if (instruction->IsOr()) {
        GenerateOrrConst(out_reg, first_reg, value_low);
      } else {
        DCHECK(instruction->IsXor());
        GenerateEorConst(out_reg, first_reg, value_low);
      }
    } else {
      DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
      uint32_t value_high = High32Bits(value);
      vixl32::Register first_low = LowRegisterFrom(first);
      vixl32::Register first_high = HighRegisterFrom(first);
      vixl32::Register out_low = LowRegisterFrom(out);
      vixl32::Register out_high = HighRegisterFrom(out);
      if (instruction->IsAnd()) {
        GenerateAndConst(out_low, first_low, value_low);
        GenerateAndConst(out_high, first_high, value_high);
      } else if (instruction->IsOr()) {
        GenerateOrrConst(out_low, first_low, value_low);
        GenerateOrrConst(out_high, first_high, value_high);
      } else {
        DCHECK(instruction->IsXor());
        GenerateEorConst(out_low, first_low, value_low);
        GenerateEorConst(out_high, first_high, value_high);
      }
    }
    return;
  }

  if (instruction->GetResultType() == Primitive::kPrimInt) {
    vixl32::Register first_reg = InputRegisterAt(instruction, 0);
    vixl32::Register second_reg = InputRegisterAt(instruction, 1);
    vixl32::Register out_reg = OutputRegister(instruction);
    if (instruction->IsAnd()) {
      __ And(out_reg, first_reg, second_reg);
    } else if (instruction->IsOr()) {
      __ Orr(out_reg, first_reg, second_reg);
    } else {
      DCHECK(instruction->IsXor());
      __ Eor(out_reg, first_reg, second_reg);
    }
  } else {
    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
    vixl32::Register first_low = LowRegisterFrom(first);
    vixl32::Register first_high = HighRegisterFrom(first);
    vixl32::Register second_low = LowRegisterFrom(second);
    vixl32::Register second_high = HighRegisterFrom(second);
    vixl32::Register out_low = LowRegisterFrom(out);
    vixl32::Register out_high = HighRegisterFrom(out);
    if (instruction->IsAnd()) {
      __ And(out_low, first_low, second_low);
      __ And(out_high, first_high, second_high);
    } else if (instruction->IsOr()) {
      __ Orr(out_low, first_low, second_low);
      __ Orr(out_high, first_high, second_high);
    } else {
      DCHECK(instruction->IsXor());
      __ Eor(out_low, first_low, second_low);
      __ Eor(out_high, first_high, second_high);
    }
  }
}

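// Load the GC root stored at `obj + offset` into `root`. Only the plain load
// without a read barrier is implemented so far.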
void InstructionCodeGeneratorARMVIXL::GenerateGcRootFieldLoad(
    HInstruction* instruction ATTRIBUTE_UNUSED,
    Location root,
    vixl32::Register obj,
    uint32_t offset,
    bool requires_read_barrier) {
  vixl32::Register root_reg = RegisterFrom(root);
  if (requires_read_barrier) {
    TODO_VIXL32(FATAL);
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    GetAssembler()->LoadFromOffset(kLoadWord, root_reg, obj, offset);
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}

void CodeGeneratorARMVIXL::MaybeGenerateReadBarrierSlow(HInstruction* instruction ATTRIBUTE_UNUSED,
                                                        Location out,
                                                        Location ref ATTRIBUTE_UNUSED,
                                                        Location obj ATTRIBUTE_UNUSED,
                                                        uint32_t offset ATTRIBUTE_UNUSED,
                                                        Location index ATTRIBUTE_UNUSED) {
  if (kEmitCompilerReadBarrier) {
    DCHECK(!kUseBakerReadBarrier);
    TODO_VIXL32(FATAL);
  } else if (kPoisonHeapReferences) {
    GetAssembler()->UnpoisonHeapReference(RegisterFrom(out));
  }
}

// Check if the desired_dispatch_info is supported. If it is, return it;
// otherwise return a fall-back info that should be used instead.
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARMVIXL::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info ATTRIBUTE_UNUSED,
    HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
  // TODO(VIXL): Implement optimized code paths.
  return {
    HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod,
    HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
    0u,
    0u
  };
}

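// Return the register holding the extra input of a static or direct invoke,
// reloading it from the stack when an intrinsic slow path has saved it.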
vixl32::Register CodeGeneratorARMVIXL::GetInvokeStaticOrDirectExtraParameter(
    HInvokeStaticOrDirect* invoke, vixl32::Register temp) {
  DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
  Location location = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
  if (!invoke->GetLocations()->Intrinsified()) {
    return RegisterFrom(location);
  }
  // For intrinsics we allow any location, so it may be on the stack.
  if (!location.IsRegister()) {
    GetAssembler()->LoadFromOffset(kLoadWord, temp, sp, location.GetStackIndex());
    return temp;
  }
  // For register locations, check if the register was saved. If so, get it from the stack.
  // Note: There is a chance that the register was saved but not overwritten, so we could
  // save one load. However, since this is just an intrinsic slow path we prefer this
  // simple and more robust approach rather than trying to determine if that's the case.
  SlowPathCode* slow_path = GetCurrentSlowPath();
  DCHECK(slow_path != nullptr);  // For intrinsified invokes the call is emitted on the slow path.
  if (slow_path->IsCoreRegisterSaved(RegisterFrom(location).GetCode())) {
    int stack_offset = slow_path->GetStackOffsetOfCoreRegister(RegisterFrom(location).GetCode());
    GetAssembler()->LoadFromOffset(kLoadWord, temp, sp, stack_offset);
    return temp;
  }
  return RegisterFrom(location);
}

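// Load the target ArtMethod* into `temp` according to the method load kind,
// then call the method through its entry_point_from_quick_compiled_code_.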
void CodeGeneratorARMVIXL::GenerateStaticOrDirectCall(
    HInvokeStaticOrDirect* invoke, Location temp) {
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  vixl32::Register temp_reg = RegisterFrom(temp);

  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      uint32_t offset =
          GetThreadOffset<kArmPointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      // temp = thread->string_init_entrypoint
      GetAssembler()->LoadFromOffset(kLoadWord, temp_reg, tr, offset);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      vixl32::Register method_reg;
      if (current_method.IsRegister()) {
        method_reg = RegisterFrom(current_method);
      } else {
        TODO_VIXL32(FATAL);
      }
      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      GetAssembler()->LoadFromOffset(
          kLoadWord,
          temp_reg,
          method_reg,
          ArtMethod::DexCacheResolvedMethodsOffset(kArmPointerSize).Int32Value());
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      GetAssembler()->LoadFromOffset(
          kLoadWord, temp_reg, temp_reg, CodeGenerator::GetCachePointerOffset(index_in_cache));
      break;
    }
    default:
      TODO_VIXL32(FATAL);
  }

  // TODO(VIXL): Support `CodePtrLocation` values other than `kCallArtMethod`.
  if (invoke->GetCodePtrLocation() != HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod) {
    TODO_VIXL32(FATAL);
  }

  // LR = callee_method->entry_point_from_quick_compiled_code_
  GetAssembler()->LoadFromOffset(
      kLoadWord,
      lr,
      RegisterFrom(callee_method),
      ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArmPointerSize).Int32Value());
  // LR()
  __ Blx(lr);

  DCHECK(!IsLeafMethod());
}

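// Load the receiver's class, fetch the ArtMethod* at `method_offset` in its
// vtable, and call the method through its quick entry point.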
void CodeGeneratorARMVIXL::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_location) {
  vixl32::Register temp = RegisterFrom(temp_location);
  uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kArmPointerSize).Uint32Value();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConventionARMVIXL calling_convention;
  vixl32::Register receiver = calling_convention.GetRegisterAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  // /* HeapReference<Class> */ temp = receiver->klass_
  GetAssembler()->LoadFromOffset(kLoadWord, temp, receiver, class_offset);
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  GetAssembler()->MaybeUnpoisonHeapReference(temp);

  // temp = temp->GetMethodAt(method_offset);
  uint32_t entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmPointerSize).Int32Value();
  GetAssembler()->LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  GetAssembler()->LoadFromOffset(kLoadWord, lr, temp, entry_point);
  // LR();
  __ Blx(lr);
}

// Copy the result of a call into the given target.
void CodeGeneratorARMVIXL::MoveFromReturnRegister(Location trg ATTRIBUTE_UNUSED,
                                                  Primitive::Type type ATTRIBUTE_UNUSED) {
  TODO_VIXL32(FATAL);
}

#undef __
#undef QUICK_ENTRY_POINT
#undef TODO_VIXL32

}  // namespace arm
}  // namespace art