/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_COMMON_ARM64_H_
#define ART_COMPILER_OPTIMIZING_COMMON_ARM64_H_

#include "code_generator.h"
#include "instruction_simplifier_shared.h"
#include "locations.h"
#include "nodes.h"
#include "utils/arm64/assembler_arm64.h"

// TODO(VIXL): Make VIXL compile with -Wshadow.
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wshadow"
#include "aarch64/disasm-aarch64.h"
#include "aarch64/macro-assembler-aarch64.h"
#include "aarch64/simulator-aarch64.h"
#pragma GCC diagnostic pop

namespace art {

using helpers::CanFitInShifterOperand;
using helpers::HasShifterOperand;

namespace arm64 {
namespace helpers {

// Convenience helpers to ease conversion to and from VIXL operands.
static_assert((SP == 31) && (WSP == 31) && (XZR == 32) && (WZR == 32),
              "Unexpected values for register codes.");

inline int VIXLRegCodeFromART(int code) {
  if (code == SP) {
    return vixl::aarch64::kSPRegInternalCode;
  }
  if (code == XZR) {
    return vixl::aarch64::kZeroRegCode;
  }
  return code;
}

inline int ARTRegCodeFromVIXL(int code) {
  if (code == vixl::aarch64::kSPRegInternalCode) {
    return SP;
  }
  if (code == vixl::aarch64::kZeroRegCode) {
    return XZR;
  }
  return code;
}
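// For example, the two conversions above round-trip: VIXLRegCodeFromART(SP)
// yields VIXL's internal SP code, which ARTRegCodeFromVIXL maps back to SP,
// while ordinary register codes (e.g. x1, code 1) pass through unchanged.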

inline vixl::aarch64::Register XRegisterFrom(Location location) {
  DCHECK(location.IsRegister()) << location;
  return vixl::aarch64::Register::GetXRegFromCode(VIXLRegCodeFromART(location.reg()));
}

inline vixl::aarch64::Register WRegisterFrom(Location location) {
  DCHECK(location.IsRegister()) << location;
  return vixl::aarch64::Register::GetWRegFromCode(VIXLRegCodeFromART(location.reg()));
}

inline vixl::aarch64::Register RegisterFrom(Location location, DataType::Type type) {
  DCHECK(type != DataType::Type::kVoid && !DataType::IsFloatingPointType(type)) << type;
  return type == DataType::Type::kInt64 ? XRegisterFrom(location) : WRegisterFrom(location);
}

inline vixl::aarch64::Register OutputRegister(HInstruction* instr) {
  return RegisterFrom(instr->GetLocations()->Out(), instr->GetType());
}

inline vixl::aarch64::Register InputRegisterAt(HInstruction* instr, int input_index) {
  return RegisterFrom(instr->GetLocations()->InAt(input_index),
                      instr->InputAt(input_index)->GetType());
}

inline vixl::aarch64::FPRegister DRegisterFrom(Location location) {
  DCHECK(location.IsFpuRegister()) << location;
  return vixl::aarch64::FPRegister::GetDRegFromCode(location.reg());
}

inline vixl::aarch64::FPRegister QRegisterFrom(Location location) {
  DCHECK(location.IsFpuRegister()) << location;
  return vixl::aarch64::FPRegister::GetQRegFromCode(location.reg());
}

inline vixl::aarch64::FPRegister VRegisterFrom(Location location) {
  DCHECK(location.IsFpuRegister()) << location;
  return vixl::aarch64::FPRegister::GetVRegFromCode(location.reg());
}

inline vixl::aarch64::FPRegister SRegisterFrom(Location location) {
  DCHECK(location.IsFpuRegister()) << location;
  return vixl::aarch64::FPRegister::GetSRegFromCode(location.reg());
}

inline vixl::aarch64::FPRegister FPRegisterFrom(Location location, DataType::Type type) {
  DCHECK(DataType::IsFloatingPointType(type)) << type;
  return type == DataType::Type::kFloat64 ? DRegisterFrom(location) : SRegisterFrom(location);
}

inline vixl::aarch64::FPRegister OutputFPRegister(HInstruction* instr) {
  return FPRegisterFrom(instr->GetLocations()->Out(), instr->GetType());
}

inline vixl::aarch64::FPRegister InputFPRegisterAt(HInstruction* instr, int input_index) {
  return FPRegisterFrom(instr->GetLocations()->InAt(input_index),
                        instr->InputAt(input_index)->GetType());
}

inline vixl::aarch64::CPURegister CPURegisterFrom(Location location, DataType::Type type) {
  return DataType::IsFloatingPointType(type)
      ? vixl::aarch64::CPURegister(FPRegisterFrom(location, type))
      : vixl::aarch64::CPURegister(RegisterFrom(location, type));
}

inline vixl::aarch64::CPURegister OutputCPURegister(HInstruction* instr) {
  return DataType::IsFloatingPointType(instr->GetType())
      ? static_cast<vixl::aarch64::CPURegister>(OutputFPRegister(instr))
      : static_cast<vixl::aarch64::CPURegister>(OutputRegister(instr));
}

inline vixl::aarch64::CPURegister InputCPURegisterAt(HInstruction* instr, int index) {
  return DataType::IsFloatingPointType(instr->InputAt(index)->GetType())
      ? static_cast<vixl::aarch64::CPURegister>(InputFPRegisterAt(instr, index))
      : static_cast<vixl::aarch64::CPURegister>(InputRegisterAt(instr, index));
}

inline vixl::aarch64::CPURegister InputCPURegisterOrZeroRegAt(HInstruction* instr,
                                                              int index) {
  HInstruction* input = instr->InputAt(index);
  DataType::Type input_type = input->GetType();
  if (input->IsConstant() && input->AsConstant()->IsZeroBitPattern()) {
    return (DataType::Size(input_type) >= vixl::aarch64::kXRegSizeInBytes)
        ? vixl::aarch64::Register(vixl::aarch64::xzr)
        : vixl::aarch64::Register(vixl::aarch64::wzr);
  }
  return InputCPURegisterAt(instr, index);
}

inline int64_t Int64FromLocation(Location location) {
  return Int64FromConstant(location.GetConstant());
}

inline vixl::aarch64::Operand OperandFrom(Location location, DataType::Type type) {
  if (location.IsRegister()) {
    return vixl::aarch64::Operand(RegisterFrom(location, type));
  } else {
    return vixl::aarch64::Operand(Int64FromLocation(location));
  }
}

inline vixl::aarch64::Operand InputOperandAt(HInstruction* instr, int input_index) {
  return OperandFrom(instr->GetLocations()->InAt(input_index),
                     instr->InputAt(input_index)->GetType());
}

inline vixl::aarch64::MemOperand StackOperandFrom(Location location) {
  return vixl::aarch64::MemOperand(vixl::aarch64::sp, location.GetStackIndex());
}

inline vixl::aarch64::MemOperand HeapOperand(const vixl::aarch64::Register& base,
                                             size_t offset = 0) {
  // A heap reference must be 32 bits, so it fits in a W register.
  DCHECK(base.IsW());
  return vixl::aarch64::MemOperand(base.X(), offset);
}

inline vixl::aarch64::MemOperand HeapOperand(const vixl::aarch64::Register& base,
                                             const vixl::aarch64::Register& regoffset,
                                             vixl::aarch64::Shift shift = vixl::aarch64::LSL,
                                             unsigned shift_amount = 0) {
  // A heap reference must be 32 bits, so it fits in a W register.
  DCHECK(base.IsW());
  return vixl::aarch64::MemOperand(base.X(), regoffset, shift, shift_amount);
}

inline vixl::aarch64::MemOperand HeapOperand(const vixl::aarch64::Register& base,
                                             Offset offset) {
  return HeapOperand(base, offset.SizeValue());
}

inline vixl::aarch64::MemOperand HeapOperandFrom(Location location, Offset offset) {
  return HeapOperand(RegisterFrom(location, DataType::Type::kReference), offset);
}

inline Location LocationFrom(const vixl::aarch64::Register& reg) {
  return Location::RegisterLocation(ARTRegCodeFromVIXL(reg.GetCode()));
}

inline Location LocationFrom(const vixl::aarch64::FPRegister& fpreg) {
  return Location::FpuRegisterLocation(fpreg.GetCode());
}

inline vixl::aarch64::Operand OperandFromMemOperand(
    const vixl::aarch64::MemOperand& mem_op) {
  if (mem_op.IsImmediateOffset()) {
    return vixl::aarch64::Operand(mem_op.GetOffset());
  } else {
    DCHECK(mem_op.IsRegisterOffset());
    if (mem_op.GetExtend() != vixl::aarch64::NO_EXTEND) {
      return vixl::aarch64::Operand(mem_op.GetRegisterOffset(),
                                    mem_op.GetExtend(),
                                    mem_op.GetShiftAmount());
    } else if (mem_op.GetShift() != vixl::aarch64::NO_SHIFT) {
      return vixl::aarch64::Operand(mem_op.GetRegisterOffset(),
                                    mem_op.GetShift(),
                                    mem_op.GetShiftAmount());
    } else {
      LOG(FATAL) << "Should not reach here";
      UNREACHABLE();
    }
  }
}

inline bool AddSubCanEncodeAsImmediate(int64_t value) {
  // If `value` does not fit but `-value` does, VIXL will automatically use
  // the 'opposite' instruction.
  return vixl::aarch64::Assembler::IsImmAddSub(value)
      || vixl::aarch64::Assembler::IsImmAddSub(-value);
}
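// To illustrate (assuming the usual A64 add/sub immediate encoding, a 12-bit
// unsigned value optionally shifted left by 12): -1 itself is not encodable,
// but IsImmAddSub(1) holds, so an `Add(x0, x1, -1)` emitted through the VIXL
// macro assembler can still be materialized as `sub x0, x1, #1`.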

inline bool Arm64CanEncodeConstantAsImmediate(HConstant* constant, HInstruction* instr) {
  int64_t value = CodeGenerator::GetInt64ValueOf(constant);

  // TODO: Improve this when IsSIMDConstantEncodable method is implemented in VIXL.
  if (instr->IsVecReplicateScalar()) {
    if (constant->IsLongConstant()) {
      return false;
    } else if (constant->IsFloatConstant()) {
      return vixl::aarch64::Assembler::IsImmFP32(constant->AsFloatConstant()->GetValue());
    } else if (constant->IsDoubleConstant()) {
      return vixl::aarch64::Assembler::IsImmFP64(constant->AsDoubleConstant()->GetValue());
    }
    return IsUint<8>(value);
  }

  // Code generation for Min/Max:
  //   Cmp left_op, right_op
  //   Csel dst, left_op, right_op, cond
  if (instr->IsMin() || instr->IsMax()) {
    if (constant->GetUses().HasExactlyOneElement()) {
      // If the value can be encoded as an immediate for the Cmp, then let VIXL
      // handle the constant generation for the Csel.
      return AddSubCanEncodeAsImmediate(value);
    }
    // These values are encodable as immediates for Cmp and VIXL will use csinc and csinv
    // with the zr register as right_op, hence no constant generation is required.
    return constant->IsZeroBitPattern() || constant->IsOne() || constant->IsMinusOne();
  }
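  // As an illustrative sketch: for HMin(a, 1) the constant hits the second
  // case above, and the generated code is expected to look roughly like
  //   cmp   w1, #1
  //   csinc w0, w1, wzr, lt   // w0 = (w1 < 1) ? w1 : wzr + 1, i.e. min(a, 1)
  // so no extra register is needed to materialize the constant.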

  // For single uses we let VIXL handle the constant generation since it will
  // use registers that are not managed by the register allocator (wip0, wip1).
  if (constant->GetUses().HasExactlyOneElement()) {
    return true;
  }

  // Our code generator ensures shift distances are within an encodable range.
  if (instr->IsRor()) {
    return true;
  }

  if (instr->IsAnd() || instr->IsOr() || instr->IsXor()) {
    // Uses logical operations.
    return vixl::aarch64::Assembler::IsImmLogical(value, vixl::aarch64::kXRegSize);
  } else if (instr->IsNeg()) {
    // Uses mov -immediate.
    return vixl::aarch64::Assembler::IsImmMovn(value, vixl::aarch64::kXRegSize);
  } else {
    DCHECK(instr->IsAdd() ||
           instr->IsIntermediateAddress() ||
           instr->IsBoundsCheck() ||
           instr->IsCompare() ||
           instr->IsCondition() ||
           instr->IsSub())
        << instr->DebugName();
    // Uses aliases of ADD/SUB instructions.
    return AddSubCanEncodeAsImmediate(value);
  }
}

inline Location ARM64EncodableConstantOrRegister(HInstruction* constant,
                                                 HInstruction* instr) {
  if (constant->IsConstant()
      && Arm64CanEncodeConstantAsImmediate(constant->AsConstant(), instr)) {
    return Location::ConstantLocation(constant->AsConstant());
  }

  return Location::RequiresRegister();
}

// Check if the registers in an ART register set have the same register codes in VIXL.
// If the codes are the same, we can initialize a VIXL register list directly from the
// register masks. Currently, only the SP/WSP and XZR/WZR codes differ between ART and
// VIXL.
// Note: This function is only used for debug checks.
inline bool ArtVixlRegCodeCoherentForRegSet(uint32_t art_core_registers,
                                            size_t num_core,
                                            uint32_t art_fpu_registers,
                                            size_t num_fpu) {
  // The register masks won't work if the number of registers is larger than 32.
  DCHECK_GE(sizeof(art_core_registers) * 8, num_core);
  DCHECK_GE(sizeof(art_fpu_registers) * 8, num_fpu);
  for (size_t art_reg_code = 0; art_reg_code < num_core; ++art_reg_code) {
    if (RegisterSet::Contains(art_core_registers, art_reg_code)) {
      if (art_reg_code != static_cast<size_t>(VIXLRegCodeFromART(art_reg_code))) {
        return false;
      }
    }
  }
  // There is no register code translation for float registers.
  return true;
}
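// Hypothetical usage sketch (the real call sites live in the arm64 code
// generator): a debug build could assert coherence before building a VIXL
// CPURegList straight from ART register masks, e.g.
//   DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask, num_core_regs,
//                                          fp_spill_mask, num_fp_regs));
// where the mask and count arguments are placeholders, not names from this file.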

inline vixl::aarch64::Shift ShiftFromOpKind(HDataProcWithShifterOp::OpKind op_kind) {
  switch (op_kind) {
    case HDataProcWithShifterOp::kASR: return vixl::aarch64::ASR;
    case HDataProcWithShifterOp::kLSL: return vixl::aarch64::LSL;
    case HDataProcWithShifterOp::kLSR: return vixl::aarch64::LSR;
    default:
      LOG(FATAL) << "Unexpected op kind " << op_kind;
      UNREACHABLE();
      return vixl::aarch64::NO_SHIFT;
  }
}

inline vixl::aarch64::Extend ExtendFromOpKind(HDataProcWithShifterOp::OpKind op_kind) {
  switch (op_kind) {
    case HDataProcWithShifterOp::kUXTB: return vixl::aarch64::UXTB;
    case HDataProcWithShifterOp::kUXTH: return vixl::aarch64::UXTH;
    case HDataProcWithShifterOp::kUXTW: return vixl::aarch64::UXTW;
    case HDataProcWithShifterOp::kSXTB: return vixl::aarch64::SXTB;
    case HDataProcWithShifterOp::kSXTH: return vixl::aarch64::SXTH;
    case HDataProcWithShifterOp::kSXTW: return vixl::aarch64::SXTW;
    default:
      LOG(FATAL) << "Unexpected op kind " << op_kind;
      UNREACHABLE();
      return vixl::aarch64::NO_EXTEND;
  }
}

inline bool ShifterOperandSupportsExtension(HInstruction* instruction) {
  DCHECK(HasShifterOperand(instruction, InstructionSet::kArm64));
  // Although the `neg` instruction is an alias of the `sub` instruction, `HNeg`
  // does *not* support extension. This is because the `extended register` form
  // of the `sub` instruction interprets the left register with code 31 as the
  // stack pointer and not the zero register. (So does the `immediate` form.) In
  // the other form, `shifted register`, the register with code 31 is interpreted
  // as the zero register.
  return instruction->IsAdd() || instruction->IsSub();
}
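// To make the comment above concrete: `neg w0, w1` is the alias
// `sub w0, wzr, w1`, which relies on the shifted-register form reading
// register code 31 as wzr. In the extended-register form that same first
// source encoding denotes wsp, so a negation with an extended operand cannot
// be expressed, which is why HNeg is excluded here.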

inline bool IsConstantZeroBitPattern(const HInstruction* instruction) {
  return instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern();
}

}  // namespace helpers
}  // namespace arm64
}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_COMMON_ARM64_H_