Aart Bik | 281c681 | 2016-08-26 11:31:48 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2016 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | #include "loop_optimization.h" |
| 18 | |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 19 | #include "arch/arm/instruction_set_features_arm.h" |
| 20 | #include "arch/arm64/instruction_set_features_arm64.h" |
Andreas Gampe | 8cf9cb3 | 2017-07-19 09:28:38 -0700 | [diff] [blame] | 21 | #include "arch/instruction_set.h" |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 22 | #include "arch/x86/instruction_set_features_x86.h" |
| 23 | #include "arch/x86_64/instruction_set_features_x86_64.h" |
Artem Serov | c8150b5 | 2019-07-31 18:28:00 +0100 | [diff] [blame] | 24 | #include "code_generator.h" |
Vladimir Marko | a043111 | 2018-06-25 09:32:54 +0100 | [diff] [blame] | 25 | #include "driver/compiler_options.h" |
Aart Bik | 9620230 | 2016-10-04 17:33:56 -0700 | [diff] [blame] | 26 | #include "linear_order.h" |
Aart Bik | 38a3f21 | 2017-10-20 17:02:21 -0700 | [diff] [blame] | 27 | #include "mirror/array-inl.h" |
| 28 | #include "mirror/string.h" |
Aart Bik | 281c681 | 2016-08-26 11:31:48 -0700 | [diff] [blame] | 29 | |
Vladimir Marko | 0a51605 | 2019-10-14 13:00:44 +0000 | [diff] [blame] | 30 | namespace art { |
Aart Bik | 281c681 | 2016-08-26 11:31:48 -0700 | [diff] [blame] | 31 | |
// Enables vectorization (SIMDization) in the loop optimizer.
// Compile-time switch: set to false to turn all SIMD code generation off.
static constexpr bool kEnableVectorization = true;
| 34 | |
Aart Bik | 38a3f21 | 2017-10-20 17:02:21 -0700 | [diff] [blame] | 35 | // |
| 36 | // Static helpers. |
| 37 | // |
| 38 | |
// Base alignment for arrays/strings guaranteed by the Android runtime.
// Used by the vectorizer to reason about the alignment of memory accesses.
static uint32_t BaseAlignment() {
  return kObjectAlignment;
}
| 43 | |
| 44 | // Hidden offset for arrays/strings guaranteed by the Android runtime. |
| 45 | static uint32_t HiddenOffset(DataType::Type type, bool is_string_char_at) { |
| 46 | return is_string_char_at |
| 47 | ? mirror::String::ValueOffset().Uint32Value() |
| 48 | : mirror::Array::DataOffset(DataType::Size(type)).Uint32Value(); |
| 49 | } |
| 50 | |
// Remove the instruction from the graph. A bit more elaborate than the usual
// instruction removal, since there may be a cycle in the use structure.
static void RemoveFromCycle(HInstruction* instruction) {
  // Drop this instruction's own references to its inputs and environments first.
  instruction->RemoveAsUserOfAllInputs();
  instruction->RemoveEnvironmentUsers();
  // Unlink from the block without safety checks: remaining cyclic uses
  // would otherwise trip the usual "no users left" verification.
  instruction->GetBlock()->RemoveInstructionOrPhi(instruction, /*ensure_safety=*/ false);
  // Finally scrub environment bookkeeping so no stale records remain.
  RemoveEnvironmentUses(instruction);
  ResetEnvironmentInputRecords(instruction);
}
| 60 | |
Aart Bik | 807868e | 2016-11-03 17:51:43 -0700 | [diff] [blame] | 61 | // Detect a goto block and sets succ to the single successor. |
Aart Bik | e3dedc5 | 2016-11-02 17:50:27 -0700 | [diff] [blame] | 62 | static bool IsGotoBlock(HBasicBlock* block, /*out*/ HBasicBlock** succ) { |
| 63 | if (block->GetPredecessors().size() == 1 && |
| 64 | block->GetSuccessors().size() == 1 && |
| 65 | block->IsSingleGoto()) { |
| 66 | *succ = block->GetSingleSuccessor(); |
| 67 | return true; |
| 68 | } |
| 69 | return false; |
| 70 | } |
| 71 | |
Aart Bik | 807868e | 2016-11-03 17:51:43 -0700 | [diff] [blame] | 72 | // Detect an early exit loop. |
| 73 | static bool IsEarlyExit(HLoopInformation* loop_info) { |
| 74 | HBlocksInLoopReversePostOrderIterator it_loop(*loop_info); |
| 75 | for (it_loop.Advance(); !it_loop.Done(); it_loop.Advance()) { |
| 76 | for (HBasicBlock* successor : it_loop.Current()->GetSuccessors()) { |
| 77 | if (!loop_info->Contains(*successor)) { |
| 78 | return true; |
| 79 | } |
| 80 | } |
| 81 | } |
| 82 | return false; |
| 83 | } |
| 84 | |
// Forward declaration: IsSignExtensionAndGet and IsZeroExtensionAndGet are
// mutually recursive through their Int16/Uint16 re-interpretation cases.
static bool IsZeroExtensionAndGet(HInstruction* instruction,
                                  DataType::Type type,
                                  /*out*/ HInstruction** operand);
Aart Bik | 68ca702 | 2017-09-26 16:44:23 -0700 | [diff] [blame] | 89 | |
// Detect a sign extension in instruction from the given type.
// Returns the promoted operand on success.
static bool IsSignExtensionAndGet(HInstruction* instruction,
                                  DataType::Type type,
                                  /*out*/ HInstruction** operand) {
  // Accept any already wider constant that would be handled properly by sign
  // extension when represented in the *width* of the given narrower data type
  // (the fact that Uint8/Uint16 normally zero extend does not matter here).
  int64_t value = 0;
  if (IsInt64AndGet(instruction, /*out*/ &value)) {
    switch (type) {
      case DataType::Type::kUint8:
      case DataType::Type::kInt8:
        // Constant fits in 8 signed bits: sign extension reproduces it.
        if (IsInt<8>(value)) {
          *operand = instruction;
          return true;
        }
        return false;
      case DataType::Type::kUint16:
      case DataType::Type::kInt16:
        // Constant fits in 16 signed bits: sign extension reproduces it.
        if (IsInt<16>(value)) {
          *operand = instruction;
          return true;
        }
        return false;
      default:
        return false;
    }
  }
  // An implicit widening conversion of any signed expression sign-extends.
  if (instruction->GetType() == type) {
    switch (type) {
      case DataType::Type::kInt8:
      case DataType::Type::kInt16:
        *operand = instruction;
        return true;
      default:
        return false;
    }
  }
  // An explicit widening conversion of a signed expression sign-extends.
  if (instruction->IsTypeConversion()) {
    HInstruction* conv = instruction->InputAt(0);
    DataType::Type from = conv->GetType();
    switch (instruction->GetType()) {
      case DataType::Type::kInt32:
      case DataType::Type::kInt64:
        // Conversion to a wider signed type from a matching signed source:
        // report the (unconverted) source as the promoted operand.
        if (type == from && (from == DataType::Type::kInt8 ||
                             from == DataType::Type::kInt16 ||
                             from == DataType::Type::kInt32)) {
          *operand = conv;
          return true;
        }
        return false;
      case DataType::Type::kInt16:
        // A conversion to Int16 of a Uint16 value merely re-interprets bits;
        // defer to the zero extension helper (mutual recursion).
        return type == DataType::Type::kUint16 &&
               from == DataType::Type::kUint16 &&
               IsZeroExtensionAndGet(instruction->InputAt(0), type, /*out*/ operand);
      default:
        return false;
    }
  }
  return false;
}
| 154 | |
// Detect a zero extension in instruction from the given type.
// Returns the promoted operand on success.
static bool IsZeroExtensionAndGet(HInstruction* instruction,
                                  DataType::Type type,
                                  /*out*/ HInstruction** operand) {
  // Accept any already wider constant that would be handled properly by zero
  // extension when represented in the *width* of the given narrower data type
  // (the fact that Int8/Int16 normally sign extend does not matter here).
  int64_t value = 0;
  if (IsInt64AndGet(instruction, /*out*/ &value)) {
    switch (type) {
      case DataType::Type::kUint8:
      case DataType::Type::kInt8:
        // Constant fits in 8 unsigned bits: zero extension reproduces it.
        if (IsUint<8>(value)) {
          *operand = instruction;
          return true;
        }
        return false;
      case DataType::Type::kUint16:
      case DataType::Type::kInt16:
        // Constant fits in 16 unsigned bits: zero extension reproduces it.
        if (IsUint<16>(value)) {
          *operand = instruction;
          return true;
        }
        return false;
      default:
        return false;
    }
  }
  // An implicit widening conversion of any unsigned expression zero-extends.
  if (instruction->GetType() == type) {
    switch (type) {
      case DataType::Type::kUint8:
      case DataType::Type::kUint16:
        *operand = instruction;
        return true;
      default:
        return false;
    }
  }
  // An explicit widening conversion of an unsigned expression zero-extends.
  if (instruction->IsTypeConversion()) {
    HInstruction* conv = instruction->InputAt(0);
    DataType::Type from = conv->GetType();
    switch (instruction->GetType()) {
      case DataType::Type::kInt32:
      case DataType::Type::kInt64:
        // Conversion to a wider type from Uint16: report the (unconverted)
        // source as the promoted operand.
        if (type == from && from == DataType::Type::kUint16) {
          *operand = conv;
          return true;
        }
        return false;
      case DataType::Type::kUint16:
        // A conversion to Uint16 of an Int16 value merely re-interprets bits;
        // defer to the sign extension helper (mutual recursion).
        return type == DataType::Type::kInt16 &&
               from == DataType::Type::kInt16 &&
               IsSignExtensionAndGet(instruction->InputAt(0), type, /*out*/ operand);
      default:
        return false;
    }
  }
  return false;
}
| 217 | |
Aart Bik | 304c8a5 | 2017-05-23 11:01:13 -0700 | [diff] [blame] | 218 | // Detect situations with same-extension narrower operands. |
| 219 | // Returns true on success and sets is_unsigned accordingly. |
| 220 | static bool IsNarrowerOperands(HInstruction* a, |
| 221 | HInstruction* b, |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 222 | DataType::Type type, |
Aart Bik | 304c8a5 | 2017-05-23 11:01:13 -0700 | [diff] [blame] | 223 | /*out*/ HInstruction** r, |
| 224 | /*out*/ HInstruction** s, |
| 225 | /*out*/ bool* is_unsigned) { |
Nicolas Geoffray | a3e2326 | 2018-03-28 11:15:12 +0000 | [diff] [blame] | 226 | DCHECK(a != nullptr && b != nullptr); |
Aart Bik | 4d1a9d4 | 2017-10-19 14:40:55 -0700 | [diff] [blame] | 227 | // Look for a matching sign extension. |
| 228 | DataType::Type stype = HVecOperation::ToSignedType(type); |
| 229 | if (IsSignExtensionAndGet(a, stype, r) && IsSignExtensionAndGet(b, stype, s)) { |
Aart Bik | 304c8a5 | 2017-05-23 11:01:13 -0700 | [diff] [blame] | 230 | *is_unsigned = false; |
| 231 | return true; |
Aart Bik | 4d1a9d4 | 2017-10-19 14:40:55 -0700 | [diff] [blame] | 232 | } |
| 233 | // Look for a matching zero extension. |
| 234 | DataType::Type utype = HVecOperation::ToUnsignedType(type); |
| 235 | if (IsZeroExtensionAndGet(a, utype, r) && IsZeroExtensionAndGet(b, utype, s)) { |
Aart Bik | 304c8a5 | 2017-05-23 11:01:13 -0700 | [diff] [blame] | 236 | *is_unsigned = true; |
| 237 | return true; |
| 238 | } |
| 239 | return false; |
| 240 | } |
| 241 | |
| 242 | // As above, single operand. |
| 243 | static bool IsNarrowerOperand(HInstruction* a, |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 244 | DataType::Type type, |
Aart Bik | 304c8a5 | 2017-05-23 11:01:13 -0700 | [diff] [blame] | 245 | /*out*/ HInstruction** r, |
| 246 | /*out*/ bool* is_unsigned) { |
Nicolas Geoffray | a3e2326 | 2018-03-28 11:15:12 +0000 | [diff] [blame] | 247 | DCHECK(a != nullptr); |
Aart Bik | 4d1a9d4 | 2017-10-19 14:40:55 -0700 | [diff] [blame] | 248 | // Look for a matching sign extension. |
| 249 | DataType::Type stype = HVecOperation::ToSignedType(type); |
| 250 | if (IsSignExtensionAndGet(a, stype, r)) { |
Aart Bik | 304c8a5 | 2017-05-23 11:01:13 -0700 | [diff] [blame] | 251 | *is_unsigned = false; |
| 252 | return true; |
Aart Bik | 4d1a9d4 | 2017-10-19 14:40:55 -0700 | [diff] [blame] | 253 | } |
| 254 | // Look for a matching zero extension. |
| 255 | DataType::Type utype = HVecOperation::ToUnsignedType(type); |
| 256 | if (IsZeroExtensionAndGet(a, utype, r)) { |
Aart Bik | 304c8a5 | 2017-05-23 11:01:13 -0700 | [diff] [blame] | 257 | *is_unsigned = true; |
| 258 | return true; |
| 259 | } |
| 260 | return false; |
| 261 | } |
| 262 | |
Aart Bik | dbbac8f | 2017-09-01 13:06:08 -0700 | [diff] [blame] | 263 | // Compute relative vector length based on type difference. |
Aart Bik | 38a3f21 | 2017-10-20 17:02:21 -0700 | [diff] [blame] | 264 | static uint32_t GetOtherVL(DataType::Type other_type, DataType::Type vector_type, uint32_t vl) { |
Vladimir Marko | d5d2f2c | 2017-09-26 12:37:26 +0100 | [diff] [blame] | 265 | DCHECK(DataType::IsIntegralType(other_type)); |
| 266 | DCHECK(DataType::IsIntegralType(vector_type)); |
| 267 | DCHECK_GE(DataType::SizeShift(other_type), DataType::SizeShift(vector_type)); |
| 268 | return vl >> (DataType::SizeShift(other_type) - DataType::SizeShift(vector_type)); |
Aart Bik | dbbac8f | 2017-09-01 13:06:08 -0700 | [diff] [blame] | 269 | } |
| 270 | |
// Detect up to two added operands a and b and an accumulated constant c.
// Recursively flattens nested add/sub expressions; *a and *b must start out
// nullptr and *c zeroed so results accumulate across recursive calls.
static bool IsAddConst(HInstruction* instruction,
                       /*out*/ HInstruction** a,
                       /*out*/ HInstruction** b,
                       /*out*/ int64_t* c,
                       int32_t depth = 8) {  // don't search too deep
  int64_t value = 0;
  // Enter add/sub while still within reasonable depth.
  if (depth > 0) {
    if (instruction->IsAdd()) {
      // Both addends must flatten successfully.
      return IsAddConst(instruction->InputAt(0), a, b, c, depth - 1) &&
             IsAddConst(instruction->InputAt(1), a, b, c, depth - 1);
    } else if (instruction->IsSub() &&
               IsInt64AndGet(instruction->InputAt(1), &value)) {
      // Fold a subtracted constant into the accumulator, recurse on the rest.
      *c -= value;
      return IsAddConst(instruction->InputAt(0), a, b, c, depth - 1);
    }
  }
  // Otherwise, deal with leaf nodes.
  if (IsInt64AndGet(instruction, &value)) {
    // Constant leaf: accumulate into c.
    *c += value;
    return true;
  } else if (*a == nullptr) {
    *a = instruction;
    return true;
  } else if (*b == nullptr) {
    *b = instruction;
    return true;
  }
  return false;  // too many operands
}
| 302 | |
Nicolas Geoffray | a3e2326 | 2018-03-28 11:15:12 +0000 | [diff] [blame] | 303 | // Detect a + b + c with optional constant c. |
| 304 | static bool IsAddConst2(HGraph* graph, |
| 305 | HInstruction* instruction, |
| 306 | /*out*/ HInstruction** a, |
| 307 | /*out*/ HInstruction** b, |
| 308 | /*out*/ int64_t* c) { |
Artem Serov | b47b978 | 2019-12-04 21:02:09 +0000 | [diff] [blame] | 309 | // We want an actual add/sub and not the trivial case where {b: 0, c: 0}. |
| 310 | if (IsAddOrSub(instruction) && IsAddConst(instruction, a, b, c) && *a != nullptr) { |
Nicolas Geoffray | a3e2326 | 2018-03-28 11:15:12 +0000 | [diff] [blame] | 311 | if (*b == nullptr) { |
| 312 | // Constant is usually already present, unless accumulated. |
| 313 | *b = graph->GetConstant(instruction->GetType(), (*c)); |
| 314 | *c = 0; |
Aart Bik | 5f80500 | 2017-05-16 16:42:41 -0700 | [diff] [blame] | 315 | } |
Aart Bik | 5f80500 | 2017-05-16 16:42:41 -0700 | [diff] [blame] | 316 | return true; |
| 317 | } |
| 318 | return false; |
| 319 | } |
| 320 | |
Nicolas Geoffray | a3e2326 | 2018-03-28 11:15:12 +0000 | [diff] [blame] | 321 | // Detect a direct a - b or a hidden a - (-c). |
| 322 | static bool IsSubConst2(HGraph* graph, |
| 323 | HInstruction* instruction, |
| 324 | /*out*/ HInstruction** a, |
| 325 | /*out*/ HInstruction** b) { |
| 326 | int64_t c = 0; |
| 327 | if (instruction->IsSub()) { |
| 328 | *a = instruction->InputAt(0); |
| 329 | *b = instruction->InputAt(1); |
| 330 | return true; |
| 331 | } else if (IsAddConst(instruction, a, b, &c) && *a != nullptr && *b == nullptr) { |
| 332 | // Constant for the hidden subtraction. |
| 333 | *b = graph->GetConstant(instruction->GetType(), -c); |
| 334 | return true; |
Aart Bik | df011c3 | 2017-09-28 12:53:04 -0700 | [diff] [blame] | 335 | } |
| 336 | return false; |
| 337 | } |
| 338 | |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 339 | // Detect reductions of the following forms, |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 340 | // x = x_phi + .. |
| 341 | // x = x_phi - .. |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 342 | static bool HasReductionFormat(HInstruction* reduction, HInstruction* phi) { |
Aart Bik | 3f08e9b | 2018-05-01 13:42:03 -0700 | [diff] [blame] | 343 | if (reduction->IsAdd()) { |
Aart Bik | dbbac8f | 2017-09-01 13:06:08 -0700 | [diff] [blame] | 344 | return (reduction->InputAt(0) == phi && reduction->InputAt(1) != phi) || |
| 345 | (reduction->InputAt(0) != phi && reduction->InputAt(1) == phi); |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 346 | } else if (reduction->IsSub()) { |
Aart Bik | dbbac8f | 2017-09-01 13:06:08 -0700 | [diff] [blame] | 347 | return (reduction->InputAt(0) == phi && reduction->InputAt(1) != phi); |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 348 | } |
| 349 | return false; |
| 350 | } |
| 351 | |
Aart Bik | dbbac8f | 2017-09-01 13:06:08 -0700 | [diff] [blame] | 352 | // Translates vector operation to reduction kind. |
| 353 | static HVecReduce::ReductionKind GetReductionKind(HVecOperation* reduction) { |
Shalini Salomi Bodapati | 81d15be | 2019-05-30 11:00:42 +0530 | [diff] [blame] | 354 | if (reduction->IsVecAdd() || |
Artem Serov | aaac0e3 | 2018-08-07 00:52:22 +0100 | [diff] [blame] | 355 | reduction->IsVecSub() || |
| 356 | reduction->IsVecSADAccumulate() || |
| 357 | reduction->IsVecDotProd()) { |
Aart Bik | 0148de4 | 2017-09-05 09:25:01 -0700 | [diff] [blame] | 358 | return HVecReduce::kSum; |
Aart Bik | 0148de4 | 2017-09-05 09:25:01 -0700 | [diff] [blame] | 359 | } |
Aart Bik | 38a3f21 | 2017-10-20 17:02:21 -0700 | [diff] [blame] | 360 | LOG(FATAL) << "Unsupported SIMD reduction " << reduction->GetId(); |
Aart Bik | 0148de4 | 2017-09-05 09:25:01 -0700 | [diff] [blame] | 361 | UNREACHABLE(); |
| 362 | } |
| 363 | |
// Tests whether any of the tested restriction bits is set in restrictions.
static bool HasVectorRestrictions(uint64_t restrictions, uint64_t tested) {
  const uint64_t overlap = restrictions & tested;
  return overlap != 0u;
}
| 368 | |
Aart Bik | f3e61ee | 2017-04-12 17:09:20 -0700 | [diff] [blame] | 369 | // Insert an instruction. |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 370 | static HInstruction* Insert(HBasicBlock* block, HInstruction* instruction) { |
| 371 | DCHECK(block != nullptr); |
| 372 | DCHECK(instruction != nullptr); |
| 373 | block->InsertInstructionBefore(instruction, block->GetLastInstruction()); |
| 374 | return instruction; |
| 375 | } |
| 376 | |
Artem Serov | 21c7e6f | 2017-07-27 16:04:42 +0100 | [diff] [blame] | 377 | // Check that instructions from the induction sets are fully removed: have no uses |
| 378 | // and no other instructions use them. |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 379 | static bool CheckInductionSetFullyRemoved(ScopedArenaSet<HInstruction*>* iset) { |
Artem Serov | 21c7e6f | 2017-07-27 16:04:42 +0100 | [diff] [blame] | 380 | for (HInstruction* instr : *iset) { |
| 381 | if (instr->GetBlock() != nullptr || |
| 382 | !instr->GetUses().empty() || |
| 383 | !instr->GetEnvUses().empty() || |
| 384 | HasEnvironmentUsedByOthers(instr)) { |
| 385 | return false; |
| 386 | } |
| 387 | } |
Artem Serov | 21c7e6f | 2017-07-27 16:04:42 +0100 | [diff] [blame] | 388 | return true; |
| 389 | } |
| 390 | |
// Tries to statically evaluate condition of the specified "HIf" for other condition checks.
static void TryToEvaluateIfCondition(HIf* instruction, HGraph* graph) {
  HInstruction* cond = instruction->InputAt(0);

  // If a condition 'cond' is evaluated in an HIf instruction then in the successors of the
  // IF_BLOCK we statically know the value of the condition 'cond' (TRUE in TRUE_SUCC, FALSE in
  // FALSE_SUCC). Using that we can replace another evaluation (use) EVAL of the same 'cond'
  // with TRUE value (FALSE value) if every path from the ENTRY_BLOCK to EVAL_BLOCK contains the
  // edge HIF_BLOCK->TRUE_SUCC (HIF_BLOCK->FALSE_SUCC).
  //     if (cond) {               if(cond) {
  //       if (cond) {}              if (1) {}
  //     } else {        =======>  } else {
  //       if (cond) {}              if (0) {}
  //     }                         }
  // Nothing to propagate when the condition is already a constant.
  if (!cond->IsConstant()) {
    HBasicBlock* true_succ = instruction->IfTrueSuccessor();
    HBasicBlock* false_succ = instruction->IfFalseSuccessor();

    // Both successors must be exclusively owned by this HIf so that
    // dominance by a successor really implies the condition's value.
    DCHECK_EQ(true_succ->GetPredecessors().size(), 1u);
    DCHECK_EQ(false_succ->GetPredecessors().size(), 1u);

    const HUseList<HInstruction*>& uses = cond->GetUses();
    for (auto it = uses.begin(), end = uses.end(); it != end; /* ++it below */) {
      HInstruction* user = it->GetUser();
      size_t index = it->GetIndex();
      HBasicBlock* user_block = user->GetBlock();
      // Increment `it` now because `*it` may disappear thanks to user->ReplaceInput().
      ++it;
      if (true_succ->Dominates(user_block)) {
        // Every path to this use passes through the true edge: cond is 1 there.
        user->ReplaceInput(graph->GetIntConstant(1), index);
      } else if (false_succ->Dominates(user_block)) {
        // Every path to this use passes through the false edge: cond is 0 there.
        user->ReplaceInput(graph->GetIntConstant(0), index);
      }
    }
  }
}
| 427 | |
Artem Serov | 18ba1da | 2018-05-16 19:06:32 +0100 | [diff] [blame] | 428 | // Peel the first 'count' iterations of the loop. |
Nicolas Geoffray | 256c94b | 2019-04-29 10:55:09 +0100 | [diff] [blame] | 429 | static void PeelByCount(HLoopInformation* loop_info, |
| 430 | int count, |
| 431 | InductionVarRange* induction_range) { |
Artem Serov | 18ba1da | 2018-05-16 19:06:32 +0100 | [diff] [blame] | 432 | for (int i = 0; i < count; i++) { |
| 433 | // Perform peeling. |
Artem Serov | 0f5b2bf | 2019-10-23 14:07:41 +0100 | [diff] [blame] | 434 | LoopClonerSimpleHelper helper(loop_info, induction_range); |
Artem Serov | 18ba1da | 2018-05-16 19:06:32 +0100 | [diff] [blame] | 435 | helper.DoPeeling(); |
| 436 | } |
| 437 | } |
| 438 | |
Artem Serov | aaac0e3 | 2018-08-07 00:52:22 +0100 | [diff] [blame] | 439 | // Returns the narrower type out of instructions a and b types. |
| 440 | static DataType::Type GetNarrowerType(HInstruction* a, HInstruction* b) { |
| 441 | DataType::Type type = a->GetType(); |
| 442 | if (DataType::Size(b->GetType()) < DataType::Size(type)) { |
| 443 | type = b->GetType(); |
| 444 | } |
| 445 | if (a->IsTypeConversion() && |
| 446 | DataType::Size(a->InputAt(0)->GetType()) < DataType::Size(type)) { |
| 447 | type = a->InputAt(0)->GetType(); |
| 448 | } |
| 449 | if (b->IsTypeConversion() && |
| 450 | DataType::Size(b->InputAt(0)->GetType()) < DataType::Size(type)) { |
| 451 | type = b->InputAt(0)->GetType(); |
| 452 | } |
| 453 | return type; |
| 454 | } |
| 455 | |
//
// Public methods.
//

// Constructs the loop optimization pass. All per-run state (allocator, loop
// hierarchy, vectorization bookkeeping) starts out empty and is set up in
// Run(); only configuration derived from the code generator is captured here.
HLoopOptimization::HLoopOptimization(HGraph* graph,
                                     const CodeGenerator& codegen,
                                     HInductionVarAnalysis* induction_analysis,
                                     OptimizingCompilerStats* stats,
                                     const char* name)
    : HOptimization(graph, name, stats),
      compiler_options_(&codegen.GetCompilerOptions()),
      simd_register_size_(codegen.GetSIMDRegisterWidth()),
      induction_range_(induction_analysis),
      loop_allocator_(nullptr),  // phase-local, installed in Run()
      global_allocator_(graph_->GetAllocator()),
      top_loop_(nullptr),
      last_loop_(nullptr),
      iset_(nullptr),
      reductions_(nullptr),
      simplified_(false),
      vector_length_(0),
      vector_refs_(nullptr),
      vector_static_peeling_factor_(0),
      vector_dynamic_peeling_candidate_(nullptr),
      vector_runtime_test_a_(nullptr),
      vector_runtime_test_b_(nullptr),
      vector_map_(nullptr),
      vector_permanent_map_(nullptr),
      vector_mode_(kSequential),
      vector_preheader_(nullptr),
      vector_header_(nullptr),
      vector_body_(nullptr),
      vector_index_(nullptr),
      arch_loop_helper_(ArchNoOptsLoopHelper::Create(compiler_options_ != nullptr
                                                         ? compiler_options_->GetInstructionSet()
                                                         : InstructionSet::kNone,
                                                     global_allocator_)) {
}
| 494 | |
Aart Bik | 2477320 | 2018-04-26 10:28:51 -0700 | [diff] [blame] | 495 | bool HLoopOptimization::Run() { |
Mingyao Yang | 01b47b0 | 2017-02-03 12:09:57 -0800 | [diff] [blame] | 496 | // Skip if there is no loop or the graph has try-catch/irreducible loops. |
Aart Bik | 281c681 | 2016-08-26 11:31:48 -0700 | [diff] [blame] | 497 | // TODO: make this less of a sledgehammer. |
Mingyao Yang | 69d75ff | 2017-02-07 13:06:06 -0800 | [diff] [blame] | 498 | if (!graph_->HasLoops() || graph_->HasTryCatch() || graph_->HasIrreducibleLoops()) { |
Aart Bik | 2477320 | 2018-04-26 10:28:51 -0700 | [diff] [blame] | 499 | return false; |
Aart Bik | 281c681 | 2016-08-26 11:31:48 -0700 | [diff] [blame] | 500 | } |
| 501 | |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 502 | // Phase-local allocator. |
| 503 | ScopedArenaAllocator allocator(graph_->GetArenaStack()); |
Aart Bik | 9620230 | 2016-10-04 17:33:56 -0700 | [diff] [blame] | 504 | loop_allocator_ = &allocator; |
Nicolas Geoffray | ebe1674 | 2016-10-05 09:55:42 +0100 | [diff] [blame] | 505 | |
Aart Bik | 9620230 | 2016-10-04 17:33:56 -0700 | [diff] [blame] | 506 | // Perform loop optimizations. |
Aart Bik | 2477320 | 2018-04-26 10:28:51 -0700 | [diff] [blame] | 507 | bool didLoopOpt = LocalRun(); |
Mingyao Yang | 69d75ff | 2017-02-07 13:06:06 -0800 | [diff] [blame] | 508 | if (top_loop_ == nullptr) { |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 509 | graph_->SetHasLoops(false); // no more loops |
Mingyao Yang | 69d75ff | 2017-02-07 13:06:06 -0800 | [diff] [blame] | 510 | } |
| 511 | |
Aart Bik | 9620230 | 2016-10-04 17:33:56 -0700 | [diff] [blame] | 512 | // Detach. |
| 513 | loop_allocator_ = nullptr; |
| 514 | last_loop_ = top_loop_ = nullptr; |
Aart Bik | 2477320 | 2018-04-26 10:28:51 -0700 | [diff] [blame] | 515 | |
| 516 | return didLoopOpt; |
Aart Bik | 9620230 | 2016-10-04 17:33:56 -0700 | [diff] [blame] | 517 | } |
| 518 | |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 519 | // |
| 520 | // Loop setup and traversal. |
| 521 | // |
| 522 | |
Aart Bik | 2477320 | 2018-04-26 10:28:51 -0700 | [diff] [blame] | 523 | bool HLoopOptimization::LocalRun() { |
| 524 | bool didLoopOpt = false; |
Aart Bik | 9620230 | 2016-10-04 17:33:56 -0700 | [diff] [blame] | 525 | // Build the linear order using the phase-local allocator. This step enables building |
| 526 | // a loop hierarchy that properly reflects the outer-inner and previous-next relation. |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 527 | ScopedArenaVector<HBasicBlock*> linear_order(loop_allocator_->Adapter(kArenaAllocLinearOrder)); |
| 528 | LinearizeGraph(graph_, &linear_order); |
Aart Bik | 9620230 | 2016-10-04 17:33:56 -0700 | [diff] [blame] | 529 | |
Aart Bik | 281c681 | 2016-08-26 11:31:48 -0700 | [diff] [blame] | 530 | // Build the loop hierarchy. |
Aart Bik | 9620230 | 2016-10-04 17:33:56 -0700 | [diff] [blame] | 531 | for (HBasicBlock* block : linear_order) { |
Aart Bik | 281c681 | 2016-08-26 11:31:48 -0700 | [diff] [blame] | 532 | if (block->IsLoopHeader()) { |
| 533 | AddLoop(block->GetLoopInformation()); |
| 534 | } |
| 535 | } |
Aart Bik | 9620230 | 2016-10-04 17:33:56 -0700 | [diff] [blame] | 536 | |
Aart Bik | 8c4a854 | 2016-10-06 11:36:57 -0700 | [diff] [blame] | 537 | // Traverse the loop hierarchy inner-to-outer and optimize. Traversal can use |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 538 | // temporary data structures using the phase-local allocator. All new HIR |
| 539 | // should use the global allocator. |
Aart Bik | 8c4a854 | 2016-10-06 11:36:57 -0700 | [diff] [blame] | 540 | if (top_loop_ != nullptr) { |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 541 | ScopedArenaSet<HInstruction*> iset(loop_allocator_->Adapter(kArenaAllocLoopOptimization)); |
| 542 | ScopedArenaSafeMap<HInstruction*, HInstruction*> reds( |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 543 | std::less<HInstruction*>(), loop_allocator_->Adapter(kArenaAllocLoopOptimization)); |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 544 | ScopedArenaSet<ArrayReference> refs(loop_allocator_->Adapter(kArenaAllocLoopOptimization)); |
| 545 | ScopedArenaSafeMap<HInstruction*, HInstruction*> map( |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 546 | std::less<HInstruction*>(), loop_allocator_->Adapter(kArenaAllocLoopOptimization)); |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 547 | ScopedArenaSafeMap<HInstruction*, HInstruction*> perm( |
Aart Bik | 0148de4 | 2017-09-05 09:25:01 -0700 | [diff] [blame] | 548 | std::less<HInstruction*>(), loop_allocator_->Adapter(kArenaAllocLoopOptimization)); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 549 | // Attach. |
Aart Bik | 8c4a854 | 2016-10-06 11:36:57 -0700 | [diff] [blame] | 550 | iset_ = &iset; |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 551 | reductions_ = &reds; |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 552 | vector_refs_ = &refs; |
| 553 | vector_map_ = ↦ |
Aart Bik | 0148de4 | 2017-09-05 09:25:01 -0700 | [diff] [blame] | 554 | vector_permanent_map_ = &perm; |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 555 | // Traverse. |
Aart Bik | 2477320 | 2018-04-26 10:28:51 -0700 | [diff] [blame] | 556 | didLoopOpt = TraverseLoopsInnerToOuter(top_loop_); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 557 | // Detach. |
| 558 | iset_ = nullptr; |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 559 | reductions_ = nullptr; |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 560 | vector_refs_ = nullptr; |
| 561 | vector_map_ = nullptr; |
Aart Bik | 0148de4 | 2017-09-05 09:25:01 -0700 | [diff] [blame] | 562 | vector_permanent_map_ = nullptr; |
Aart Bik | 8c4a854 | 2016-10-06 11:36:57 -0700 | [diff] [blame] | 563 | } |
Aart Bik | 2477320 | 2018-04-26 10:28:51 -0700 | [diff] [blame] | 564 | return didLoopOpt; |
Aart Bik | 281c681 | 2016-08-26 11:31:48 -0700 | [diff] [blame] | 565 | } |
| 566 | |
| 567 | void HLoopOptimization::AddLoop(HLoopInformation* loop_info) { |
| 568 | DCHECK(loop_info != nullptr); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 569 | LoopNode* node = new (loop_allocator_) LoopNode(loop_info); |
Aart Bik | 281c681 | 2016-08-26 11:31:48 -0700 | [diff] [blame] | 570 | if (last_loop_ == nullptr) { |
| 571 | // First loop. |
| 572 | DCHECK(top_loop_ == nullptr); |
| 573 | last_loop_ = top_loop_ = node; |
| 574 | } else if (loop_info->IsIn(*last_loop_->loop_info)) { |
| 575 | // Inner loop. |
| 576 | node->outer = last_loop_; |
| 577 | DCHECK(last_loop_->inner == nullptr); |
| 578 | last_loop_ = last_loop_->inner = node; |
| 579 | } else { |
| 580 | // Subsequent loop. |
| 581 | while (last_loop_->outer != nullptr && !loop_info->IsIn(*last_loop_->outer->loop_info)) { |
| 582 | last_loop_ = last_loop_->outer; |
| 583 | } |
| 584 | node->outer = last_loop_->outer; |
| 585 | node->previous = last_loop_; |
| 586 | DCHECK(last_loop_->next == nullptr); |
| 587 | last_loop_ = last_loop_->next = node; |
| 588 | } |
| 589 | } |
| 590 | |
// Unlinks |node| from the loop hierarchy, re-linking siblings and the
// parent (or top_loop_) around it. Only leaf nodes may be removed.
void HLoopOptimization::RemoveLoop(LoopNode* node) {
  DCHECK(node != nullptr);
  DCHECK(node->inner == nullptr);  // only loops without nested loops can be removed
  if (node->previous != nullptr) {
    // Within sequence: bypass |node| in the doubly-linked sibling list.
    node->previous->next = node->next;
    if (node->next != nullptr) {
      node->next->previous = node->previous;
    }
  } else {
    // First of sequence: the parent's inner pointer (or top_loop_ at the
    // outermost level) must now point at the successor, which becomes the
    // new head of the sibling list.
    if (node->outer != nullptr) {
      node->outer->inner = node->next;
    } else {
      top_loop_ = node->next;
    }
    if (node->next != nullptr) {
      node->next->outer = node->outer;
      node->next->previous = nullptr;
    }
  }
}
| 613 | |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 614 | bool HLoopOptimization::TraverseLoopsInnerToOuter(LoopNode* node) { |
| 615 | bool changed = false; |
Aart Bik | 281c681 | 2016-08-26 11:31:48 -0700 | [diff] [blame] | 616 | for ( ; node != nullptr; node = node->next) { |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 617 | // Visit inner loops first. Recompute induction information for this |
| 618 | // loop if the induction of any inner loop has changed. |
| 619 | if (TraverseLoopsInnerToOuter(node->inner)) { |
Aart Bik | 482095d | 2016-10-10 15:39:10 -0700 | [diff] [blame] | 620 | induction_range_.ReVisit(node->loop_info); |
Aart Bik | a8360cd | 2018-05-02 16:07:51 -0700 | [diff] [blame] | 621 | changed = true; |
Aart Bik | 482095d | 2016-10-10 15:39:10 -0700 | [diff] [blame] | 622 | } |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 623 | // Repeat simplifications in the loop-body until no more changes occur. |
Aart Bik | 6b69e0a | 2017-01-11 10:20:43 -0800 | [diff] [blame] | 624 | // Note that since each simplification consists of eliminating code (without |
| 625 | // introducing new code), this process is always finite. |
Aart Bik | df7822e | 2016-12-06 10:05:30 -0800 | [diff] [blame] | 626 | do { |
| 627 | simplified_ = false; |
Aart Bik | df7822e | 2016-12-06 10:05:30 -0800 | [diff] [blame] | 628 | SimplifyInduction(node); |
Aart Bik | 6b69e0a | 2017-01-11 10:20:43 -0800 | [diff] [blame] | 629 | SimplifyBlocks(node); |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 630 | changed = simplified_ || changed; |
Aart Bik | df7822e | 2016-12-06 10:05:30 -0800 | [diff] [blame] | 631 | } while (simplified_); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 632 | // Optimize inner loop. |
Aart Bik | 9abf894 | 2016-10-14 09:49:42 -0700 | [diff] [blame] | 633 | if (node->inner == nullptr) { |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 634 | changed = OptimizeInnerLoop(node) || changed; |
Aart Bik | 9abf894 | 2016-10-14 09:49:42 -0700 | [diff] [blame] | 635 | } |
Aart Bik | 281c681 | 2016-08-26 11:31:48 -0700 | [diff] [blame] | 636 | } |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 637 | return changed; |
Aart Bik | 281c681 | 2016-08-26 11:31:48 -0700 | [diff] [blame] | 638 | } |
| 639 | |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 640 | // |
| 641 | // Optimization. |
| 642 | // |
| 643 | |
Aart Bik | 281c681 | 2016-08-26 11:31:48 -0700 | [diff] [blame] | 644 | void HLoopOptimization::SimplifyInduction(LoopNode* node) { |
| 645 | HBasicBlock* header = node->loop_info->GetHeader(); |
| 646 | HBasicBlock* preheader = node->loop_info->GetPreHeader(); |
Aart Bik | 8c4a854 | 2016-10-06 11:36:57 -0700 | [diff] [blame] | 647 | // Scan the phis in the header to find opportunities to simplify an induction |
| 648 | // cycle that is only used outside the loop. Replace these uses, if any, with |
| 649 | // the last value and remove the induction cycle. |
| 650 | // Examples: for (int i = 0; x != null; i++) { .... no i .... } |
| 651 | // for (int i = 0; i < 10; i++, k++) { .... no k .... } return k; |
Aart Bik | 281c681 | 2016-08-26 11:31:48 -0700 | [diff] [blame] | 652 | for (HInstructionIterator it(header->GetPhis()); !it.Done(); it.Advance()) { |
| 653 | HPhi* phi = it.Current()->AsPhi(); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 654 | if (TrySetPhiInduction(phi, /*restrict_uses*/ true) && |
| 655 | TryAssignLastValue(node->loop_info, phi, preheader, /*collect_loop_uses*/ false)) { |
Aart Bik | 671e48a | 2017-08-09 13:16:56 -0700 | [diff] [blame] | 656 | // Note that it's ok to have replaced uses after the loop with the last value, without |
| 657 | // being able to remove the cycle. Environment uses (which are the reason we may not be |
| 658 | // able to remove the cycle) within the loop will still hold the right value. We must |
| 659 | // have tried first, however, to replace outside uses. |
| 660 | if (CanRemoveCycle()) { |
| 661 | simplified_ = true; |
| 662 | for (HInstruction* i : *iset_) { |
| 663 | RemoveFromCycle(i); |
| 664 | } |
| 665 | DCHECK(CheckInductionSetFullyRemoved(iset_)); |
Aart Bik | 281c681 | 2016-08-26 11:31:48 -0700 | [diff] [blame] | 666 | } |
Aart Bik | 482095d | 2016-10-10 15:39:10 -0700 | [diff] [blame] | 667 | } |
| 668 | } |
| 669 | } |
| 670 | |
// Cleans up the loop-body: removes dead instructions, merges straight-line
// block pairs, and bypasses trivial diamonds. Sets simplified_ on any change.
void HLoopOptimization::SimplifyBlocks(LoopNode* node) {
  // Iterate over all basic blocks in the loop-body.
  for (HBlocksInLoopIterator it(*node->loop_info); !it.Done(); it.Advance()) {
    HBasicBlock* block = it.Current();
    // Remove dead instructions from the loop-body.
    RemoveDeadInstructions(block->GetPhis());
    RemoveDeadInstructions(block->GetInstructions());
    // Remove trivial control flow blocks from the loop-body: a block with a
    // single successor whose successor has this block as its sole predecessor
    // can simply be merged with that successor.
    if (block->GetPredecessors().size() == 1 &&
        block->GetSuccessors().size() == 1 &&
        block->GetSingleSuccessor()->GetPredecessors().size() == 1) {
      simplified_ = true;
      block->MergeWith(block->GetSingleSuccessor());
    } else if (block->GetSuccessors().size() == 2) {
      // Trivial if block can be bypassed to either branch.
      HBasicBlock* succ0 = block->GetSuccessors()[0];
      HBasicBlock* succ1 = block->GetSuccessors()[1];
      HBasicBlock* meet0 = nullptr;
      HBasicBlock* meet1 = nullptr;
      if (succ0 != succ1 &&
          IsGotoBlock(succ0, &meet0) &&
          IsGotoBlock(succ1, &meet1) &&
          meet0 == meet1 &&  // meets again
          meet0 != block &&  // no self-loop
          meet0->GetPhis().IsEmpty()) {  // not used for merging
        simplified_ = true;
        // Delete one arm of the diamond; if the meet point was dominated by
        // the branching block, the surviving arm becomes its new dominator.
        succ0->DisconnectAndDelete();
        if (block->Dominates(meet0)) {
          block->RemoveDominatedBlock(meet0);
          succ1->AddDominatedBlock(meet0);
          meet0->SetDominator(succ1);
        }
      }
    }
  }
}
| 707 | |
Artem Serov | 121f203 | 2017-10-23 19:19:06 +0100 | [diff] [blame] | 708 | bool HLoopOptimization::TryOptimizeInnerLoopFinite(LoopNode* node) { |
Aart Bik | 281c681 | 2016-08-26 11:31:48 -0700 | [diff] [blame] | 709 | HBasicBlock* header = node->loop_info->GetHeader(); |
| 710 | HBasicBlock* preheader = node->loop_info->GetPreHeader(); |
Aart Bik | 9abf894 | 2016-10-14 09:49:42 -0700 | [diff] [blame] | 711 | // Ensure loop header logic is finite. |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 712 | int64_t trip_count = 0; |
| 713 | if (!induction_range_.IsFinite(node->loop_info, &trip_count)) { |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 714 | return false; |
Aart Bik | 9abf894 | 2016-10-14 09:49:42 -0700 | [diff] [blame] | 715 | } |
Aart Bik | 281c681 | 2016-08-26 11:31:48 -0700 | [diff] [blame] | 716 | // Ensure there is only a single loop-body (besides the header). |
| 717 | HBasicBlock* body = nullptr; |
| 718 | for (HBlocksInLoopIterator it(*node->loop_info); !it.Done(); it.Advance()) { |
| 719 | if (it.Current() != header) { |
| 720 | if (body != nullptr) { |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 721 | return false; |
Aart Bik | 281c681 | 2016-08-26 11:31:48 -0700 | [diff] [blame] | 722 | } |
| 723 | body = it.Current(); |
| 724 | } |
| 725 | } |
Andreas Gampe | f45d61c | 2017-06-07 10:29:33 -0700 | [diff] [blame] | 726 | CHECK(body != nullptr); |
Aart Bik | 281c681 | 2016-08-26 11:31:48 -0700 | [diff] [blame] | 727 | // Ensure there is only a single exit point. |
| 728 | if (header->GetSuccessors().size() != 2) { |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 729 | return false; |
Aart Bik | 281c681 | 2016-08-26 11:31:48 -0700 | [diff] [blame] | 730 | } |
| 731 | HBasicBlock* exit = (header->GetSuccessors()[0] == body) |
| 732 | ? header->GetSuccessors()[1] |
| 733 | : header->GetSuccessors()[0]; |
Aart Bik | 8c4a854 | 2016-10-06 11:36:57 -0700 | [diff] [blame] | 734 | // Ensure exit can only be reached by exiting loop. |
Aart Bik | 281c681 | 2016-08-26 11:31:48 -0700 | [diff] [blame] | 735 | if (exit->GetPredecessors().size() != 1) { |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 736 | return false; |
Aart Bik | 281c681 | 2016-08-26 11:31:48 -0700 | [diff] [blame] | 737 | } |
Aart Bik | 6b69e0a | 2017-01-11 10:20:43 -0800 | [diff] [blame] | 738 | // Detect either an empty loop (no side effects other than plain iteration) or |
| 739 | // a trivial loop (just iterating once). Replace subsequent index uses, if any, |
| 740 | // with the last value and remove the loop, possibly after unrolling its body. |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 741 | HPhi* main_phi = nullptr; |
| 742 | if (TrySetSimpleLoopHeader(header, &main_phi)) { |
Aart Bik | 6b69e0a | 2017-01-11 10:20:43 -0800 | [diff] [blame] | 743 | bool is_empty = IsEmptyBody(body); |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 744 | if (reductions_->empty() && // TODO: possible with some effort |
| 745 | (is_empty || trip_count == 1) && |
| 746 | TryAssignLastValue(node->loop_info, main_phi, preheader, /*collect_loop_uses*/ true)) { |
Aart Bik | 6b69e0a | 2017-01-11 10:20:43 -0800 | [diff] [blame] | 747 | if (!is_empty) { |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 748 | // Unroll the loop-body, which sees initial value of the index. |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 749 | main_phi->ReplaceWith(main_phi->InputAt(0)); |
Aart Bik | 6b69e0a | 2017-01-11 10:20:43 -0800 | [diff] [blame] | 750 | preheader->MergeInstructionsWith(body); |
| 751 | } |
| 752 | body->DisconnectAndDelete(); |
| 753 | exit->RemovePredecessor(header); |
| 754 | header->RemoveSuccessor(exit); |
| 755 | header->RemoveDominatedBlock(exit); |
| 756 | header->DisconnectAndDelete(); |
| 757 | preheader->AddSuccessor(exit); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 758 | preheader->AddInstruction(new (global_allocator_) HGoto()); |
Aart Bik | 6b69e0a | 2017-01-11 10:20:43 -0800 | [diff] [blame] | 759 | preheader->AddDominatedBlock(exit); |
| 760 | exit->SetDominator(preheader); |
| 761 | RemoveLoop(node); // update hierarchy |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 762 | return true; |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 763 | } |
| 764 | } |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 765 | // Vectorize loop, if possible and valid. |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 766 | if (kEnableVectorization && |
Artem Serov | e65ade7 | 2019-07-25 21:04:16 +0100 | [diff] [blame] | 767 | // Disable vectorization for debuggable graphs: this is a workaround for the bug |
| 768 | // in 'GenerateNewLoop' which caused the SuspendCheck environment to be invalid. |
| 769 | // TODO: b/138601207, investigate other possible cases with wrong environment values and |
| 770 | // possibly switch back vectorization on for debuggable graphs. |
| 771 | !graph_->IsDebuggable() && |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 772 | TrySetSimpleLoopHeader(header, &main_phi) && |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 773 | ShouldVectorize(node, body, trip_count) && |
| 774 | TryAssignLastValue(node->loop_info, main_phi, preheader, /*collect_loop_uses*/ true)) { |
| 775 | Vectorize(node, body, exit, trip_count); |
| 776 | graph_->SetHasSIMD(true); // flag SIMD usage |
Aart Bik | 21b8592 | 2017-09-06 13:29:16 -0700 | [diff] [blame] | 777 | MaybeRecordStat(stats_, MethodCompilationStat::kLoopVectorized); |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 778 | return true; |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 779 | } |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 780 | return false; |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 781 | } |
| 782 | |
Artem Serov | 121f203 | 2017-10-23 19:19:06 +0100 | [diff] [blame] | 783 | bool HLoopOptimization::OptimizeInnerLoop(LoopNode* node) { |
Artem Serov | 0e32908 | 2018-06-12 10:23:27 +0100 | [diff] [blame] | 784 | return TryOptimizeInnerLoopFinite(node) || TryPeelingAndUnrolling(node); |
Artem Serov | 121f203 | 2017-10-23 19:19:06 +0100 | [diff] [blame] | 785 | } |
| 786 | |
Artem Serov | 121f203 | 2017-10-23 19:19:06 +0100 | [diff] [blame] | 787 | |
Artem Serov | 121f203 | 2017-10-23 19:19:06 +0100 | [diff] [blame] | 788 | |
| 789 | // |
Artem Serov | 0e32908 | 2018-06-12 10:23:27 +0100 | [diff] [blame] | 790 | // Scalar loop peeling and unrolling: generic part methods. |
Artem Serov | 121f203 | 2017-10-23 19:19:06 +0100 | [diff] [blame] | 791 | // |
| 792 | |
Artem Serov | 0e32908 | 2018-06-12 10:23:27 +0100 | [diff] [blame] | 793 | bool HLoopOptimization::TryUnrollingForBranchPenaltyReduction(LoopAnalysisInfo* analysis_info, |
| 794 | bool generate_code) { |
| 795 | if (analysis_info->GetNumberOfExits() > 1) { |
Artem Serov | 121f203 | 2017-10-23 19:19:06 +0100 | [diff] [blame] | 796 | return false; |
| 797 | } |
| 798 | |
Artem Serov | 0e32908 | 2018-06-12 10:23:27 +0100 | [diff] [blame] | 799 | uint32_t unrolling_factor = arch_loop_helper_->GetScalarUnrollingFactor(analysis_info); |
| 800 | if (unrolling_factor == LoopAnalysisInfo::kNoUnrollingFactor) { |
Artem Serov | 121f203 | 2017-10-23 19:19:06 +0100 | [diff] [blame] | 801 | return false; |
| 802 | } |
| 803 | |
Artem Serov | 0e32908 | 2018-06-12 10:23:27 +0100 | [diff] [blame] | 804 | if (generate_code) { |
| 805 | // TODO: support other unrolling factors. |
| 806 | DCHECK_EQ(unrolling_factor, 2u); |
| 807 | |
| 808 | // Perform unrolling. |
| 809 | HLoopInformation* loop_info = analysis_info->GetLoopInfo(); |
Artem Serov | 0f5b2bf | 2019-10-23 14:07:41 +0100 | [diff] [blame] | 810 | LoopClonerSimpleHelper helper(loop_info, &induction_range_); |
Artem Serov | 0e32908 | 2018-06-12 10:23:27 +0100 | [diff] [blame] | 811 | helper.DoUnrolling(); |
| 812 | |
| 813 | // Remove the redundant loop check after unrolling. |
| 814 | HIf* copy_hif = |
| 815 | helper.GetBasicBlockMap()->Get(loop_info->GetHeader())->GetLastInstruction()->AsIf(); |
| 816 | int32_t constant = loop_info->Contains(*copy_hif->IfTrueSuccessor()) ? 1 : 0; |
| 817 | copy_hif->ReplaceInput(graph_->GetIntConstant(constant), 0u); |
Artem Serov | 121f203 | 2017-10-23 19:19:06 +0100 | [diff] [blame] | 818 | } |
Artem Serov | 121f203 | 2017-10-23 19:19:06 +0100 | [diff] [blame] | 819 | return true; |
| 820 | } |
| 821 | |
Artem Serov | 0e32908 | 2018-06-12 10:23:27 +0100 | [diff] [blame] | 822 | bool HLoopOptimization::TryPeelingForLoopInvariantExitsElimination(LoopAnalysisInfo* analysis_info, |
| 823 | bool generate_code) { |
| 824 | HLoopInformation* loop_info = analysis_info->GetLoopInfo(); |
Artem Serov | 72411e6 | 2017-10-19 16:18:07 +0100 | [diff] [blame] | 825 | if (!arch_loop_helper_->IsLoopPeelingEnabled()) { |
| 826 | return false; |
| 827 | } |
| 828 | |
Artem Serov | 0e32908 | 2018-06-12 10:23:27 +0100 | [diff] [blame] | 829 | if (analysis_info->GetNumberOfInvariantExits() == 0) { |
Artem Serov | 72411e6 | 2017-10-19 16:18:07 +0100 | [diff] [blame] | 830 | return false; |
| 831 | } |
| 832 | |
Artem Serov | 0e32908 | 2018-06-12 10:23:27 +0100 | [diff] [blame] | 833 | if (generate_code) { |
| 834 | // Perform peeling. |
Artem Serov | 0f5b2bf | 2019-10-23 14:07:41 +0100 | [diff] [blame] | 835 | LoopClonerSimpleHelper helper(loop_info, &induction_range_); |
Artem Serov | 0e32908 | 2018-06-12 10:23:27 +0100 | [diff] [blame] | 836 | helper.DoPeeling(); |
Artem Serov | 72411e6 | 2017-10-19 16:18:07 +0100 | [diff] [blame] | 837 | |
Artem Serov | 0e32908 | 2018-06-12 10:23:27 +0100 | [diff] [blame] | 838 | // Statically evaluate loop check after peeling for loop invariant condition. |
| 839 | const SuperblockCloner::HInstructionMap* hir_map = helper.GetInstructionMap(); |
| 840 | for (auto entry : *hir_map) { |
| 841 | HInstruction* copy = entry.second; |
| 842 | if (copy->IsIf()) { |
| 843 | TryToEvaluateIfCondition(copy->AsIf(), graph_); |
| 844 | } |
Artem Serov | 72411e6 | 2017-10-19 16:18:07 +0100 | [diff] [blame] | 845 | } |
| 846 | } |
| 847 | |
| 848 | return true; |
| 849 | } |
| 850 | |
Artem Serov | 18ba1da | 2018-05-16 19:06:32 +0100 | [diff] [blame] | 851 | bool HLoopOptimization::TryFullUnrolling(LoopAnalysisInfo* analysis_info, bool generate_code) { |
| 852 | // Fully unroll loops with a known and small trip count. |
| 853 | int64_t trip_count = analysis_info->GetTripCount(); |
| 854 | if (!arch_loop_helper_->IsLoopPeelingEnabled() || |
| 855 | trip_count == LoopAnalysisInfo::kUnknownTripCount || |
| 856 | !arch_loop_helper_->IsFullUnrollingBeneficial(analysis_info)) { |
| 857 | return false; |
| 858 | } |
| 859 | |
| 860 | if (generate_code) { |
| 861 | // Peeling of the N first iterations (where N equals to the trip count) will effectively |
| 862 | // eliminate the loop: after peeling we will have N sequential iterations copied into the loop |
| 863 | // preheader and the original loop. The trip count of this loop will be 0 as the sequential |
| 864 | // iterations are executed first and there are exactly N of them. Thus we can statically |
| 865 | // evaluate the loop exit condition to 'false' and fully eliminate it. |
| 866 | // |
| 867 | // Here is an example of full unrolling of a loop with a trip count 2: |
| 868 | // |
| 869 | // loop_cond_1 |
| 870 | // loop_body_1 <- First iteration. |
| 871 | // | |
| 872 | // \ v |
| 873 | // ==\ loop_cond_2 |
| 874 | // ==/ loop_body_2 <- Second iteration. |
| 875 | // / | |
| 876 | // <- v <- |
| 877 | // loop_cond \ loop_cond \ <- This cond is always false. |
| 878 | // loop_body _/ loop_body _/ |
| 879 | // |
| 880 | HLoopInformation* loop_info = analysis_info->GetLoopInfo(); |
Nicolas Geoffray | 256c94b | 2019-04-29 10:55:09 +0100 | [diff] [blame] | 881 | PeelByCount(loop_info, trip_count, &induction_range_); |
Artem Serov | 18ba1da | 2018-05-16 19:06:32 +0100 | [diff] [blame] | 882 | HIf* loop_hif = loop_info->GetHeader()->GetLastInstruction()->AsIf(); |
| 883 | int32_t constant = loop_info->Contains(*loop_hif->IfTrueSuccessor()) ? 0 : 1; |
| 884 | loop_hif->ReplaceInput(graph_->GetIntConstant(constant), 0u); |
| 885 | } |
| 886 | |
| 887 | return true; |
| 888 | } |
| 889 | |
Artem Serov | 0e32908 | 2018-06-12 10:23:27 +0100 | [diff] [blame] | 890 | bool HLoopOptimization::TryPeelingAndUnrolling(LoopNode* node) { |
Artem Serov | 0e32908 | 2018-06-12 10:23:27 +0100 | [diff] [blame] | 891 | HLoopInformation* loop_info = node->loop_info; |
| 892 | int64_t trip_count = LoopAnalysis::GetLoopTripCount(loop_info, &induction_range_); |
| 893 | LoopAnalysisInfo analysis_info(loop_info); |
| 894 | LoopAnalysis::CalculateLoopBasicProperties(loop_info, &analysis_info, trip_count); |
| 895 | |
| 896 | if (analysis_info.HasInstructionsPreventingScalarOpts() || |
| 897 | arch_loop_helper_->IsLoopNonBeneficialForScalarOpts(&analysis_info)) { |
| 898 | return false; |
| 899 | } |
| 900 | |
Artem Serov | 18ba1da | 2018-05-16 19:06:32 +0100 | [diff] [blame] | 901 | if (!TryFullUnrolling(&analysis_info, /*generate_code*/ false) && |
| 902 | !TryPeelingForLoopInvariantExitsElimination(&analysis_info, /*generate_code*/ false) && |
Artem Serov | 0e32908 | 2018-06-12 10:23:27 +0100 | [diff] [blame] | 903 | !TryUnrollingForBranchPenaltyReduction(&analysis_info, /*generate_code*/ false)) { |
| 904 | return false; |
| 905 | } |
| 906 | |
| 907 | // Run 'IsLoopClonable' the last as it might be time-consuming. |
Artem Serov | 0f5b2bf | 2019-10-23 14:07:41 +0100 | [diff] [blame] | 908 | if (!LoopClonerHelper::IsLoopClonable(loop_info)) { |
Artem Serov | 0e32908 | 2018-06-12 10:23:27 +0100 | [diff] [blame] | 909 | return false; |
| 910 | } |
| 911 | |
Artem Serov | 18ba1da | 2018-05-16 19:06:32 +0100 | [diff] [blame] | 912 | return TryFullUnrolling(&analysis_info) || |
| 913 | TryPeelingForLoopInvariantExitsElimination(&analysis_info) || |
Artem Serov | 0e32908 | 2018-06-12 10:23:27 +0100 | [diff] [blame] | 914 | TryUnrollingForBranchPenaltyReduction(&analysis_info); |
| 915 | } |
| 916 | |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 917 | // |
| 918 | // Loop vectorization. The implementation is based on the book by Aart J.C. Bik: |
| 919 | // "The Software Vectorization Handbook. Applying Multimedia Extensions for Maximum Performance." |
| 920 | // Intel Press, June, 2004 (http://www.aartbik.com/). |
| 921 | // |
| 922 | |
Aart Bik | 14a68b4 | 2017-06-08 14:06:58 -0700 | [diff] [blame] | 923 | bool HLoopOptimization::ShouldVectorize(LoopNode* node, HBasicBlock* block, int64_t trip_count) { |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 924 | // Reset vector bookkeeping. |
| 925 | vector_length_ = 0; |
| 926 | vector_refs_->clear(); |
Aart Bik | 38a3f21 | 2017-10-20 17:02:21 -0700 | [diff] [blame] | 927 | vector_static_peeling_factor_ = 0; |
| 928 | vector_dynamic_peeling_candidate_ = nullptr; |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 929 | vector_runtime_test_a_ = |
Igor Murashkin | 2ffb703 | 2017-11-08 13:35:21 -0800 | [diff] [blame] | 930 | vector_runtime_test_b_ = nullptr; |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 931 | |
| 932 | // Phis in the loop-body prevent vectorization. |
| 933 | if (!block->GetPhis().IsEmpty()) { |
| 934 | return false; |
| 935 | } |
| 936 | |
| 937 | // Scan the loop-body, starting a right-hand-side tree traversal at each left-hand-side |
| 938 | // occurrence, which allows passing down attributes down the use tree. |
| 939 | for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) { |
| 940 | if (!VectorizeDef(node, it.Current(), /*generate_code*/ false)) { |
| 941 | return false; // failure to vectorize a left-hand-side |
| 942 | } |
| 943 | } |
| 944 | |
Aart Bik | 38a3f21 | 2017-10-20 17:02:21 -0700 | [diff] [blame] | 945 | // Prepare alignment analysis: |
| 946 | // (1) find desired alignment (SIMD vector size in bytes). |
| 947 | // (2) initialize static loop peeling votes (peeling factor that will |
| 948 | // make one particular reference aligned), never to exceed (1). |
| 949 | // (3) variable to record how many references share same alignment. |
| 950 | // (4) variable to record suitable candidate for dynamic loop peeling. |
| 951 | uint32_t desired_alignment = GetVectorSizeInBytes(); |
| 952 | DCHECK_LE(desired_alignment, 16u); |
| 953 | uint32_t peeling_votes[16] = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }; |
| 954 | uint32_t max_num_same_alignment = 0; |
| 955 | const ArrayReference* peeling_candidate = nullptr; |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 956 | |
| 957 | // Data dependence analysis. Find each pair of references with same type, where |
| 958 | // at least one is a write. Each such pair denotes a possible data dependence. |
| 959 | // This analysis exploits the property that differently typed arrays cannot be |
| 960 | // aliased, as well as the property that references either point to the same |
| 961 | // array or to two completely disjoint arrays, i.e., no partial aliasing. |
| 962 | // Other than a few simply heuristics, no detailed subscript analysis is done. |
Aart Bik | 38a3f21 | 2017-10-20 17:02:21 -0700 | [diff] [blame] | 963 | // The scan over references also prepares finding a suitable alignment strategy. |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 964 | for (auto i = vector_refs_->begin(); i != vector_refs_->end(); ++i) { |
Aart Bik | 38a3f21 | 2017-10-20 17:02:21 -0700 | [diff] [blame] | 965 | uint32_t num_same_alignment = 0; |
| 966 | // Scan over all next references. |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 967 | for (auto j = i; ++j != vector_refs_->end(); ) { |
| 968 | if (i->type == j->type && (i->lhs || j->lhs)) { |
| 969 | // Found same-typed a[i+x] vs. b[i+y], where at least one is a write. |
| 970 | HInstruction* a = i->base; |
| 971 | HInstruction* b = j->base; |
| 972 | HInstruction* x = i->offset; |
| 973 | HInstruction* y = j->offset; |
| 974 | if (a == b) { |
| 975 | // Found a[i+x] vs. a[i+y]. Accept if x == y (loop-independent data dependence). |
| 976 | // Conservatively assume a loop-carried data dependence otherwise, and reject. |
| 977 | if (x != y) { |
| 978 | return false; |
| 979 | } |
Aart Bik | 38a3f21 | 2017-10-20 17:02:21 -0700 | [diff] [blame] | 980 | // Count the number of references that have the same alignment (since |
| 981 | // base and offset are the same) and where at least one is a write, so |
          // e.g. a[i] = a[i] + b[i] counts a[i] but not b[i].
| 983 | num_same_alignment++; |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 984 | } else { |
| 985 | // Found a[i+x] vs. b[i+y]. Accept if x == y (at worst loop-independent data dependence). |
| 986 | // Conservatively assume a potential loop-carried data dependence otherwise, avoided by |
| 987 | // generating an explicit a != b disambiguation runtime test on the two references. |
| 988 | if (x != y) { |
Aart Bik | 37dc4df | 2017-06-28 14:08:00 -0700 | [diff] [blame] | 989 | // To avoid excessive overhead, we only accept one a != b test. |
| 990 | if (vector_runtime_test_a_ == nullptr) { |
| 991 | // First test found. |
| 992 | vector_runtime_test_a_ = a; |
| 993 | vector_runtime_test_b_ = b; |
| 994 | } else if ((vector_runtime_test_a_ != a || vector_runtime_test_b_ != b) && |
| 995 | (vector_runtime_test_a_ != b || vector_runtime_test_b_ != a)) { |
| 996 | return false; // second test would be needed |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 997 | } |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 998 | } |
| 999 | } |
| 1000 | } |
| 1001 | } |
Aart Bik | 38a3f21 | 2017-10-20 17:02:21 -0700 | [diff] [blame] | 1002 | // Update information for finding suitable alignment strategy: |
| 1003 | // (1) update votes for static loop peeling, |
| 1004 | // (2) update suitable candidate for dynamic loop peeling. |
| 1005 | Alignment alignment = ComputeAlignment(i->offset, i->type, i->is_string_char_at); |
| 1006 | if (alignment.Base() >= desired_alignment) { |
| 1007 | // If the array/string object has a known, sufficient alignment, use the |
| 1008 | // initial offset to compute the static loop peeling vote (this always |
| 1009 | // works, since elements have natural alignment). |
| 1010 | uint32_t offset = alignment.Offset() & (desired_alignment - 1u); |
| 1011 | uint32_t vote = (offset == 0) |
| 1012 | ? 0 |
| 1013 | : ((desired_alignment - offset) >> DataType::SizeShift(i->type)); |
| 1014 | DCHECK_LT(vote, 16u); |
| 1015 | ++peeling_votes[vote]; |
| 1016 | } else if (BaseAlignment() >= desired_alignment && |
| 1017 | num_same_alignment > max_num_same_alignment) { |
| 1018 | // Otherwise, if the array/string object has a known, sufficient alignment |
| 1019 | // for just the base but with an unknown offset, record the candidate with |
| 1020 | // the most occurrences for dynamic loop peeling (again, the peeling always |
| 1021 | // works, since elements have natural alignment). |
| 1022 | max_num_same_alignment = num_same_alignment; |
| 1023 | peeling_candidate = &(*i); |
| 1024 | } |
| 1025 | } // for i |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1026 | |
Aart Bik | 38a3f21 | 2017-10-20 17:02:21 -0700 | [diff] [blame] | 1027 | // Find a suitable alignment strategy. |
| 1028 | SetAlignmentStrategy(peeling_votes, peeling_candidate); |
| 1029 | |
| 1030 | // Does vectorization seem profitable? |
| 1031 | if (!IsVectorizationProfitable(trip_count)) { |
| 1032 | return false; |
| 1033 | } |
Aart Bik | 14a68b4 | 2017-06-08 14:06:58 -0700 | [diff] [blame] | 1034 | |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1035 | // Success! |
| 1036 | return true; |
| 1037 | } |
| 1038 | |
// Replaces the analyzed loop with vectorized code. Up to three loops are
// materialized in its place:
//   (1) an optional sequential peeling loop that establishes SIMD alignment
//       (static or dynamic peeling factor from prior alignment analysis),
//   (2) the main vector loop, possibly unrolled by the arch-specific factor,
//   (3) an optional sequential cleanup loop for any remaining iterations.
// Afterwards, reductions are linked to their final uses, the original loop
// body is disconnected and deleted, and the loop hierarchy is updated.
void HLoopOptimization::Vectorize(LoopNode* node,
                                  HBasicBlock* block,
                                  HBasicBlock* exit,
                                  int64_t trip_count) {
  HBasicBlock* header = node->loop_info->GetHeader();
  HBasicBlock* preheader = node->loop_info->GetPreHeader();

  // Pick a loop unrolling factor for the vector loop.
  uint32_t unroll = arch_loop_helper_->GetSIMDUnrollingFactor(
      block, trip_count, MaxNumberPeeled(), vector_length_);
  uint32_t chunk = vector_length_ * unroll;

  // A known trip count must cover the peeled iterations plus at least one
  // full vector chunk; trip_count == 0 encodes an unknown trip count.
  DCHECK(trip_count == 0 || (trip_count >= MaxNumberPeeled() + chunk));

  // A cleanup loop is needed, at least, for any unknown trip count or
  // for a known trip count with remainder iterations after vectorization.
  bool needs_cleanup = trip_count == 0 ||
      ((trip_count - vector_static_peeling_factor_) % chunk) != 0;

  // Adjust vector bookkeeping.
  HPhi* main_phi = nullptr;
  bool is_simple_loop_header = TrySetSimpleLoopHeader(header, &main_phi);  // refills sets
  DCHECK(is_simple_loop_header);
  vector_header_ = header;
  vector_body_ = block;

  // Loop induction type.
  DataType::Type induc_type = main_phi->GetType();
  DCHECK(induc_type == DataType::Type::kInt32 || induc_type == DataType::Type::kInt64)
      << induc_type;

  // Generate the trip count for static or dynamic loop peeling, if needed:
  // ptc = <peeling factor>;
  HInstruction* ptc = nullptr;
  if (vector_static_peeling_factor_ != 0) {
    // Static loop peeling for SIMD alignment (using the most suitable
    // fixed peeling factor found during prior alignment analysis).
    DCHECK(vector_dynamic_peeling_candidate_ == nullptr);
    ptc = graph_->GetConstant(induc_type, vector_static_peeling_factor_);
  } else if (vector_dynamic_peeling_candidate_ != nullptr) {
    // Dynamic loop peeling for SIMD alignment (using the most suitable
    // candidate found during prior alignment analysis):
    // rem = offset % ALIGN;    // adjusted as #elements
    // ptc = rem == 0 ? 0 : (ALIGN - rem);
    uint32_t shift = DataType::SizeShift(vector_dynamic_peeling_candidate_->type);
    uint32_t align = GetVectorSizeInBytes() >> shift;
    uint32_t hidden_offset = HiddenOffset(vector_dynamic_peeling_candidate_->type,
                                          vector_dynamic_peeling_candidate_->is_string_char_at);
    HInstruction* adjusted_offset = graph_->GetConstant(induc_type, hidden_offset >> shift);
    HInstruction* offset = Insert(preheader, new (global_allocator_) HAdd(
        induc_type, vector_dynamic_peeling_candidate_->offset, adjusted_offset));
    // The modulo is computed as a bitwise AND, which requires the alignment
    // (in elements) to be a power of two; the select materializes the
    // "rem == 0 ? 0 : (ALIGN - rem)" expression above.
    HInstruction* rem = Insert(preheader, new (global_allocator_) HAnd(
        induc_type, offset, graph_->GetConstant(induc_type, align - 1u)));
    HInstruction* sub = Insert(preheader, new (global_allocator_) HSub(
        induc_type, graph_->GetConstant(induc_type, align), rem));
    HInstruction* cond = Insert(preheader, new (global_allocator_) HEqual(
        rem, graph_->GetConstant(induc_type, 0)));
    ptc = Insert(preheader, new (global_allocator_) HSelect(
        cond, graph_->GetConstant(induc_type, 0), sub, kNoDexPc));
    needs_cleanup = true;  // don't know the exact amount
  }

  // Generate loop control:
  // stc = <trip-count>;
  // ptc = min(stc, ptc);
  // vtc = stc - (stc - ptc) % chunk;
  // i = 0;
  HInstruction* stc = induction_range_.GenerateTripCount(node->loop_info, graph_, preheader);
  HInstruction* vtc = stc;
  if (needs_cleanup) {
    DCHECK(IsPowerOfTwo(chunk));
    HInstruction* diff = stc;
    if (ptc != nullptr) {
      if (trip_count == 0) {
        // Unknown trip count: clamp the peeling amount to the actual trip
        // count, i.e. ptc = min(stc, ptc).
        HInstruction* cond = Insert(preheader, new (global_allocator_) HAboveOrEqual(stc, ptc));
        ptc = Insert(preheader, new (global_allocator_) HSelect(cond, ptc, stc, kNoDexPc));
      }
      diff = Insert(preheader, new (global_allocator_) HSub(induc_type, stc, ptc));
    }
    HInstruction* rem = Insert(
        preheader, new (global_allocator_) HAnd(induc_type,
                                                diff,
                                                graph_->GetConstant(induc_type, chunk - 1)));
    vtc = Insert(preheader, new (global_allocator_) HSub(induc_type, stc, rem));
  }
  vector_index_ = graph_->GetConstant(induc_type, 0);

  // Generate runtime disambiguation test:
  // vtc = a != b ? vtc : 0;
  if (vector_runtime_test_a_ != nullptr) {
    HInstruction* rt = Insert(
        preheader,
        new (global_allocator_) HNotEqual(vector_runtime_test_a_, vector_runtime_test_b_));
    vtc = Insert(preheader,
                 new (global_allocator_)
                     HSelect(rt, vtc, graph_->GetConstant(induc_type, 0), kNoDexPc));
    needs_cleanup = true;
  }

  // Generate alignment peeling loop, if needed:
  // for ( ; i < ptc; i += 1)
  //    <loop-body>
  //
  // NOTE: The alignment forced by the peeling loop is preserved even if data is
  //       moved around during suspend checks, since all analysis was based on
  //       nothing more than the Android runtime alignment conventions.
  if (ptc != nullptr) {
    vector_mode_ = kSequential;
    GenerateNewLoop(node,
                    block,
                    graph_->TransformLoopForVectorization(vector_header_, vector_body_, exit),
                    vector_index_,
                    ptc,
                    graph_->GetConstant(induc_type, 1),
                    LoopAnalysisInfo::kNoUnrollingFactor);
  }

  // Generate vector loop, possibly further unrolled:
  // for ( ; i < vtc; i += chunk)
  //    <vectorized-loop-body>
  vector_mode_ = kVector;
  GenerateNewLoop(node,
                  block,
                  graph_->TransformLoopForVectorization(vector_header_, vector_body_, exit),
                  vector_index_,
                  vtc,
                  graph_->GetConstant(induc_type, vector_length_),  // increment per unroll
                  unroll);
  // Capture the vector loop's information now, since generating the cleanup
  // loop below reassigns vector_header_.
  HLoopInformation* vloop = vector_header_->GetLoopInformation();

  // Generate cleanup loop, if needed:
  // for ( ; i < stc; i += 1)
  //    <loop-body>
  if (needs_cleanup) {
    vector_mode_ = kSequential;
    GenerateNewLoop(node,
                    block,
                    graph_->TransformLoopForVectorization(vector_header_, vector_body_, exit),
                    vector_index_,
                    stc,
                    graph_->GetConstant(induc_type, 1),
                    LoopAnalysisInfo::kNoUnrollingFactor);
  }

  // Link reductions to their final uses.
  for (auto i = reductions_->begin(); i != reductions_->end(); ++i) {
    if (i->first->IsPhi()) {
      HInstruction* phi = i->first;
      HInstruction* repl = ReduceAndExtractIfNeeded(i->second);
      // Deal with regular uses.
      for (const HUseListNode<HInstruction*>& use : phi->GetUses()) {
        induction_range_.Replace(use.GetUser(), phi, repl);  // update induction use
      }
      phi->ReplaceWith(repl);
    }
  }

  // Remove the original loop by disconnecting the body block
  // and removing all instructions from the header.
  block->DisconnectAndDelete();
  while (!header->GetFirstInstruction()->IsGoto()) {
    header->RemoveInstruction(header->GetFirstInstruction());
  }

  // Update loop hierarchy: the old header now resides in the same outer loop
  // as the old preheader. Note that we don't bother putting sequential
  // loops back in the hierarchy at this point.
  header->SetLoopInformation(preheader->GetLoopInformation());  // outward
  node->loop_info = vloop;
}
| 1209 | |
| 1210 | void HLoopOptimization::GenerateNewLoop(LoopNode* node, |
| 1211 | HBasicBlock* block, |
| 1212 | HBasicBlock* new_preheader, |
| 1213 | HInstruction* lo, |
| 1214 | HInstruction* hi, |
Aart Bik | 14a68b4 | 2017-06-08 14:06:58 -0700 | [diff] [blame] | 1215 | HInstruction* step, |
| 1216 | uint32_t unroll) { |
| 1217 | DCHECK(unroll == 1 || vector_mode_ == kVector); |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1218 | DataType::Type induc_type = lo->GetType(); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1219 | // Prepare new loop. |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1220 | vector_preheader_ = new_preheader, |
| 1221 | vector_header_ = vector_preheader_->GetSingleSuccessor(); |
| 1222 | vector_body_ = vector_header_->GetSuccessors()[1]; |
Aart Bik | 14a68b4 | 2017-06-08 14:06:58 -0700 | [diff] [blame] | 1223 | HPhi* phi = new (global_allocator_) HPhi(global_allocator_, |
| 1224 | kNoRegNumber, |
| 1225 | 0, |
| 1226 | HPhi::ToPhiType(induc_type)); |
Aart Bik | b07d1bc | 2017-04-05 10:03:15 -0700 | [diff] [blame] | 1227 | // Generate header and prepare body. |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1228 | // for (i = lo; i < hi; i += step) |
| 1229 | // <loop-body> |
Aart Bik | 14a68b4 | 2017-06-08 14:06:58 -0700 | [diff] [blame] | 1230 | HInstruction* cond = new (global_allocator_) HAboveOrEqual(phi, hi); |
| 1231 | vector_header_->AddPhi(phi); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1232 | vector_header_->AddInstruction(cond); |
| 1233 | vector_header_->AddInstruction(new (global_allocator_) HIf(cond)); |
Aart Bik | 14a68b4 | 2017-06-08 14:06:58 -0700 | [diff] [blame] | 1234 | vector_index_ = phi; |
Aart Bik | 0148de4 | 2017-09-05 09:25:01 -0700 | [diff] [blame] | 1235 | vector_permanent_map_->clear(); // preserved over unrolling |
Aart Bik | 14a68b4 | 2017-06-08 14:06:58 -0700 | [diff] [blame] | 1236 | for (uint32_t u = 0; u < unroll; u++) { |
Aart Bik | 14a68b4 | 2017-06-08 14:06:58 -0700 | [diff] [blame] | 1237 | // Generate instruction map. |
Aart Bik | 0148de4 | 2017-09-05 09:25:01 -0700 | [diff] [blame] | 1238 | vector_map_->clear(); |
Aart Bik | 14a68b4 | 2017-06-08 14:06:58 -0700 | [diff] [blame] | 1239 | for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) { |
| 1240 | bool vectorized_def = VectorizeDef(node, it.Current(), /*generate_code*/ true); |
| 1241 | DCHECK(vectorized_def); |
| 1242 | } |
| 1243 | // Generate body from the instruction map, but in original program order. |
| 1244 | HEnvironment* env = vector_header_->GetFirstInstruction()->GetEnvironment(); |
| 1245 | for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) { |
| 1246 | auto i = vector_map_->find(it.Current()); |
| 1247 | if (i != vector_map_->end() && !i->second->IsInBlock()) { |
| 1248 | Insert(vector_body_, i->second); |
| 1249 | // Deal with instructions that need an environment, such as the scalar intrinsics. |
| 1250 | if (i->second->NeedsEnvironment()) { |
| 1251 | i->second->CopyEnvironmentFromWithLoopPhiAdjustment(env, vector_header_); |
| 1252 | } |
| 1253 | } |
| 1254 | } |
Aart Bik | 0148de4 | 2017-09-05 09:25:01 -0700 | [diff] [blame] | 1255 | // Generate the induction. |
Aart Bik | 14a68b4 | 2017-06-08 14:06:58 -0700 | [diff] [blame] | 1256 | vector_index_ = new (global_allocator_) HAdd(induc_type, vector_index_, step); |
| 1257 | Insert(vector_body_, vector_index_); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1258 | } |
Aart Bik | 0148de4 | 2017-09-05 09:25:01 -0700 | [diff] [blame] | 1259 | // Finalize phi inputs for the reductions (if any). |
| 1260 | for (auto i = reductions_->begin(); i != reductions_->end(); ++i) { |
| 1261 | if (!i->first->IsPhi()) { |
| 1262 | DCHECK(i->second->IsPhi()); |
| 1263 | GenerateVecReductionPhiInputs(i->second->AsPhi(), i->first); |
| 1264 | } |
| 1265 | } |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 1266 | // Finalize phi inputs for the loop index. |
Aart Bik | 14a68b4 | 2017-06-08 14:06:58 -0700 | [diff] [blame] | 1267 | phi->AddInput(lo); |
| 1268 | phi->AddInput(vector_index_); |
| 1269 | vector_index_ = phi; |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1270 | } |
| 1271 | |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1272 | bool HLoopOptimization::VectorizeDef(LoopNode* node, |
| 1273 | HInstruction* instruction, |
| 1274 | bool generate_code) { |
| 1275 | // Accept a left-hand-side array base[index] for |
| 1276 | // (1) supported vector type, |
| 1277 | // (2) loop-invariant base, |
| 1278 | // (3) unit stride index, |
| 1279 | // (4) vectorizable right-hand-side value. |
| 1280 | uint64_t restrictions = kNone; |
Georgia Kouveli | bac080b | 2019-01-31 16:12:16 +0000 | [diff] [blame] | 1281 | // Don't accept expressions that can throw. |
| 1282 | if (instruction->CanThrow()) { |
| 1283 | return false; |
| 1284 | } |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1285 | if (instruction->IsArraySet()) { |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1286 | DataType::Type type = instruction->AsArraySet()->GetComponentType(); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1287 | HInstruction* base = instruction->InputAt(0); |
| 1288 | HInstruction* index = instruction->InputAt(1); |
| 1289 | HInstruction* value = instruction->InputAt(2); |
| 1290 | HInstruction* offset = nullptr; |
Aart Bik | 6d05700 | 2018-04-09 15:39:58 -0700 | [diff] [blame] | 1291 | // For narrow types, explicit type conversion may have been |
| 1292 | // optimized way, so set the no hi bits restriction here. |
| 1293 | if (DataType::Size(type) <= 2) { |
| 1294 | restrictions |= kNoHiBits; |
| 1295 | } |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1296 | if (TrySetVectorType(type, &restrictions) && |
| 1297 | node->loop_info->IsDefinedOutOfTheLoop(base) && |
Aart Bik | 37dc4df | 2017-06-28 14:08:00 -0700 | [diff] [blame] | 1298 | induction_range_.IsUnitStride(instruction, index, graph_, &offset) && |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1299 | VectorizeUse(node, value, generate_code, type, restrictions)) { |
| 1300 | if (generate_code) { |
| 1301 | GenerateVecSub(index, offset); |
Aart Bik | 14a68b4 | 2017-06-08 14:06:58 -0700 | [diff] [blame] | 1302 | GenerateVecMem(instruction, vector_map_->Get(index), vector_map_->Get(value), offset, type); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1303 | } else { |
| 1304 | vector_refs_->insert(ArrayReference(base, offset, type, /*lhs*/ true)); |
| 1305 | } |
Aart Bik | 6b69e0a | 2017-01-11 10:20:43 -0800 | [diff] [blame] | 1306 | return true; |
| 1307 | } |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1308 | return false; |
| 1309 | } |
Aart Bik | 0148de4 | 2017-09-05 09:25:01 -0700 | [diff] [blame] | 1310 | // Accept a left-hand-side reduction for |
| 1311 | // (1) supported vector type, |
| 1312 | // (2) vectorizable right-hand-side value. |
| 1313 | auto redit = reductions_->find(instruction); |
| 1314 | if (redit != reductions_->end()) { |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1315 | DataType::Type type = instruction->GetType(); |
Aart Bik | dbbac8f | 2017-09-01 13:06:08 -0700 | [diff] [blame] | 1316 | // Recognize SAD idiom or direct reduction. |
| 1317 | if (VectorizeSADIdiom(node, instruction, generate_code, type, restrictions) || |
Artem Serov | aaac0e3 | 2018-08-07 00:52:22 +0100 | [diff] [blame] | 1318 | VectorizeDotProdIdiom(node, instruction, generate_code, type, restrictions) || |
Aart Bik | dbbac8f | 2017-09-01 13:06:08 -0700 | [diff] [blame] | 1319 | (TrySetVectorType(type, &restrictions) && |
| 1320 | VectorizeUse(node, instruction, generate_code, type, restrictions))) { |
Aart Bik | 0148de4 | 2017-09-05 09:25:01 -0700 | [diff] [blame] | 1321 | if (generate_code) { |
| 1322 | HInstruction* new_red = vector_map_->Get(instruction); |
| 1323 | vector_permanent_map_->Put(new_red, vector_map_->Get(redit->second)); |
| 1324 | vector_permanent_map_->Overwrite(redit->second, new_red); |
| 1325 | } |
| 1326 | return true; |
| 1327 | } |
| 1328 | return false; |
| 1329 | } |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1330 | // Branch back okay. |
| 1331 | if (instruction->IsGoto()) { |
| 1332 | return true; |
| 1333 | } |
| 1334 | // Otherwise accept only expressions with no effects outside the immediate loop-body. |
| 1335 | // Note that actual uses are inspected during right-hand-side tree traversal. |
Georgia Kouveli | bac080b | 2019-01-31 16:12:16 +0000 | [diff] [blame] | 1336 | return !IsUsedOutsideLoop(node->loop_info, instruction) |
| 1337 | && !instruction->DoesAnyWrite(); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1338 | } |
| 1339 | |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1340 | bool HLoopOptimization::VectorizeUse(LoopNode* node, |
| 1341 | HInstruction* instruction, |
| 1342 | bool generate_code, |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1343 | DataType::Type type, |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1344 | uint64_t restrictions) { |
| 1345 | // Accept anything for which code has already been generated. |
| 1346 | if (generate_code) { |
| 1347 | if (vector_map_->find(instruction) != vector_map_->end()) { |
| 1348 | return true; |
| 1349 | } |
| 1350 | } |
| 1351 | // Continue the right-hand-side tree traversal, passing in proper |
| 1352 | // types and vector restrictions along the way. During code generation, |
| 1353 | // all new nodes are drawn from the global allocator. |
| 1354 | if (node->loop_info->IsDefinedOutOfTheLoop(instruction)) { |
| 1355 | // Accept invariant use, using scalar expansion. |
| 1356 | if (generate_code) { |
| 1357 | GenerateVecInv(instruction, type); |
| 1358 | } |
| 1359 | return true; |
| 1360 | } else if (instruction->IsArrayGet()) { |
Goran Jakovljevic | 19680d3 | 2017-05-11 10:38:36 +0200 | [diff] [blame] | 1361 | // Deal with vector restrictions. |
Vladimir Marko | d5d2f2c | 2017-09-26 12:37:26 +0100 | [diff] [blame] | 1362 | bool is_string_char_at = instruction->AsArrayGet()->IsStringCharAt(); |
| 1363 | if (is_string_char_at && HasVectorRestrictions(restrictions, kNoStringCharAt)) { |
Goran Jakovljevic | 19680d3 | 2017-05-11 10:38:36 +0200 | [diff] [blame] | 1364 | return false; |
| 1365 | } |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1366 | // Accept a right-hand-side array base[index] for |
Vladimir Marko | d5d2f2c | 2017-09-26 12:37:26 +0100 | [diff] [blame] | 1367 | // (1) matching vector type (exact match or signed/unsigned integral type of the same size), |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1368 | // (2) loop-invariant base, |
| 1369 | // (3) unit stride index, |
| 1370 | // (4) vectorizable right-hand-side value. |
| 1371 | HInstruction* base = instruction->InputAt(0); |
| 1372 | HInstruction* index = instruction->InputAt(1); |
| 1373 | HInstruction* offset = nullptr; |
Aart Bik | 46b6dbc | 2017-10-03 11:37:37 -0700 | [diff] [blame] | 1374 | if (HVecOperation::ToSignedType(type) == HVecOperation::ToSignedType(instruction->GetType()) && |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1375 | node->loop_info->IsDefinedOutOfTheLoop(base) && |
Aart Bik | 37dc4df | 2017-06-28 14:08:00 -0700 | [diff] [blame] | 1376 | induction_range_.IsUnitStride(instruction, index, graph_, &offset)) { |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1377 | if (generate_code) { |
| 1378 | GenerateVecSub(index, offset); |
Aart Bik | 14a68b4 | 2017-06-08 14:06:58 -0700 | [diff] [blame] | 1379 | GenerateVecMem(instruction, vector_map_->Get(index), nullptr, offset, type); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1380 | } else { |
Aart Bik | 38a3f21 | 2017-10-20 17:02:21 -0700 | [diff] [blame] | 1381 | vector_refs_->insert(ArrayReference(base, offset, type, /*lhs*/ false, is_string_char_at)); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1382 | } |
| 1383 | return true; |
| 1384 | } |
Aart Bik | 0148de4 | 2017-09-05 09:25:01 -0700 | [diff] [blame] | 1385 | } else if (instruction->IsPhi()) { |
| 1386 | // Accept particular phi operations. |
| 1387 | if (reductions_->find(instruction) != reductions_->end()) { |
| 1388 | // Deal with vector restrictions. |
| 1389 | if (HasVectorRestrictions(restrictions, kNoReduction)) { |
| 1390 | return false; |
| 1391 | } |
| 1392 | // Accept a reduction. |
| 1393 | if (generate_code) { |
| 1394 | GenerateVecReductionPhi(instruction->AsPhi()); |
| 1395 | } |
| 1396 | return true; |
| 1397 | } |
| 1398 | // TODO: accept right-hand-side induction? |
| 1399 | return false; |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1400 | } else if (instruction->IsTypeConversion()) { |
| 1401 | // Accept particular type conversions. |
| 1402 | HTypeConversion* conversion = instruction->AsTypeConversion(); |
| 1403 | HInstruction* opa = conversion->InputAt(0); |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1404 | DataType::Type from = conversion->GetInputType(); |
| 1405 | DataType::Type to = conversion->GetResultType(); |
| 1406 | if (DataType::IsIntegralType(from) && DataType::IsIntegralType(to)) { |
Aart Bik | 38a3f21 | 2017-10-20 17:02:21 -0700 | [diff] [blame] | 1407 | uint32_t size_vec = DataType::Size(type); |
| 1408 | uint32_t size_from = DataType::Size(from); |
| 1409 | uint32_t size_to = DataType::Size(to); |
Aart Bik | dbbac8f | 2017-09-01 13:06:08 -0700 | [diff] [blame] | 1410 | // Accept an integral conversion |
| 1411 | // (1a) narrowing into vector type, "wider" operations cannot bring in higher order bits, or |
| 1412 | // (1b) widening from at least vector type, and |
| 1413 | // (2) vectorizable operand. |
| 1414 | if ((size_to < size_from && |
| 1415 | size_to == size_vec && |
| 1416 | VectorizeUse(node, opa, generate_code, type, restrictions | kNoHiBits)) || |
| 1417 | (size_to >= size_from && |
| 1418 | size_from >= size_vec && |
Aart Bik | 4d1a9d4 | 2017-10-19 14:40:55 -0700 | [diff] [blame] | 1419 | VectorizeUse(node, opa, generate_code, type, restrictions))) { |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1420 | if (generate_code) { |
| 1421 | if (vector_mode_ == kVector) { |
| 1422 | vector_map_->Put(instruction, vector_map_->Get(opa)); // operand pass-through |
| 1423 | } else { |
| 1424 | GenerateVecOp(instruction, vector_map_->Get(opa), nullptr, type); |
| 1425 | } |
| 1426 | } |
| 1427 | return true; |
| 1428 | } |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1429 | } else if (to == DataType::Type::kFloat32 && from == DataType::Type::kInt32) { |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1430 | DCHECK_EQ(to, type); |
| 1431 | // Accept int to float conversion for |
| 1432 | // (1) supported int, |
| 1433 | // (2) vectorizable operand. |
| 1434 | if (TrySetVectorType(from, &restrictions) && |
| 1435 | VectorizeUse(node, opa, generate_code, from, restrictions)) { |
| 1436 | if (generate_code) { |
| 1437 | GenerateVecOp(instruction, vector_map_->Get(opa), nullptr, type); |
| 1438 | } |
| 1439 | return true; |
| 1440 | } |
| 1441 | } |
| 1442 | return false; |
| 1443 | } else if (instruction->IsNeg() || instruction->IsNot() || instruction->IsBooleanNot()) { |
| 1444 | // Accept unary operator for vectorizable operand. |
| 1445 | HInstruction* opa = instruction->InputAt(0); |
| 1446 | if (VectorizeUse(node, opa, generate_code, type, restrictions)) { |
| 1447 | if (generate_code) { |
| 1448 | GenerateVecOp(instruction, vector_map_->Get(opa), nullptr, type); |
| 1449 | } |
| 1450 | return true; |
| 1451 | } |
| 1452 | } else if (instruction->IsAdd() || instruction->IsSub() || |
| 1453 | instruction->IsMul() || instruction->IsDiv() || |
| 1454 | instruction->IsAnd() || instruction->IsOr() || instruction->IsXor()) { |
| 1455 | // Deal with vector restrictions. |
| 1456 | if ((instruction->IsMul() && HasVectorRestrictions(restrictions, kNoMul)) || |
| 1457 | (instruction->IsDiv() && HasVectorRestrictions(restrictions, kNoDiv))) { |
| 1458 | return false; |
| 1459 | } |
| 1460 | // Accept binary operator for vectorizable operands. |
| 1461 | HInstruction* opa = instruction->InputAt(0); |
| 1462 | HInstruction* opb = instruction->InputAt(1); |
| 1463 | if (VectorizeUse(node, opa, generate_code, type, restrictions) && |
| 1464 | VectorizeUse(node, opb, generate_code, type, restrictions)) { |
| 1465 | if (generate_code) { |
| 1466 | GenerateVecOp(instruction, vector_map_->Get(opa), vector_map_->Get(opb), type); |
| 1467 | } |
| 1468 | return true; |
| 1469 | } |
| 1470 | } else if (instruction->IsShl() || instruction->IsShr() || instruction->IsUShr()) { |
Aart Bik | dbbac8f | 2017-09-01 13:06:08 -0700 | [diff] [blame] | 1471 | // Recognize halving add idiom. |
Aart Bik | f3e61ee | 2017-04-12 17:09:20 -0700 | [diff] [blame] | 1472 | if (VectorizeHalvingAddIdiom(node, instruction, generate_code, type, restrictions)) { |
| 1473 | return true; |
| 1474 | } |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1475 | // Deal with vector restrictions. |
Aart Bik | 304c8a5 | 2017-05-23 11:01:13 -0700 | [diff] [blame] | 1476 | HInstruction* opa = instruction->InputAt(0); |
| 1477 | HInstruction* opb = instruction->InputAt(1); |
| 1478 | HInstruction* r = opa; |
| 1479 | bool is_unsigned = false; |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1480 | if ((HasVectorRestrictions(restrictions, kNoShift)) || |
| 1481 | (instruction->IsShr() && HasVectorRestrictions(restrictions, kNoShr))) { |
| 1482 | return false; // unsupported instruction |
Aart Bik | 304c8a5 | 2017-05-23 11:01:13 -0700 | [diff] [blame] | 1483 | } else if (HasVectorRestrictions(restrictions, kNoHiBits)) { |
| 1484 | // Shifts right need extra care to account for higher order bits. |
| 1485 | // TODO: less likely shr/unsigned and ushr/signed can by flipping signess. |
| 1486 | if (instruction->IsShr() && |
| 1487 | (!IsNarrowerOperand(opa, type, &r, &is_unsigned) || is_unsigned)) { |
| 1488 | return false; // reject, unless all operands are sign-extension narrower |
| 1489 | } else if (instruction->IsUShr() && |
| 1490 | (!IsNarrowerOperand(opa, type, &r, &is_unsigned) || !is_unsigned)) { |
| 1491 | return false; // reject, unless all operands are zero-extension narrower |
| 1492 | } |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1493 | } |
| 1494 | // Accept shift operator for vectorizable/invariant operands. |
| 1495 | // TODO: accept symbolic, albeit loop invariant shift factors. |
Aart Bik | 304c8a5 | 2017-05-23 11:01:13 -0700 | [diff] [blame] | 1496 | DCHECK(r != nullptr); |
| 1497 | if (generate_code && vector_mode_ != kVector) { // de-idiom |
| 1498 | r = opa; |
| 1499 | } |
Aart Bik | 50e20d5 | 2017-05-05 14:07:29 -0700 | [diff] [blame] | 1500 | int64_t distance = 0; |
Aart Bik | 304c8a5 | 2017-05-23 11:01:13 -0700 | [diff] [blame] | 1501 | if (VectorizeUse(node, r, generate_code, type, restrictions) && |
Aart Bik | 50e20d5 | 2017-05-05 14:07:29 -0700 | [diff] [blame] | 1502 | IsInt64AndGet(opb, /*out*/ &distance)) { |
Aart Bik | 65ffd8e | 2017-05-01 16:50:45 -0700 | [diff] [blame] | 1503 | // Restrict shift distance to packed data type width. |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1504 | int64_t max_distance = DataType::Size(type) * 8; |
Aart Bik | 65ffd8e | 2017-05-01 16:50:45 -0700 | [diff] [blame] | 1505 | if (0 <= distance && distance < max_distance) { |
| 1506 | if (generate_code) { |
Aart Bik | 304c8a5 | 2017-05-23 11:01:13 -0700 | [diff] [blame] | 1507 | GenerateVecOp(instruction, vector_map_->Get(r), opb, type); |
Aart Bik | 65ffd8e | 2017-05-01 16:50:45 -0700 | [diff] [blame] | 1508 | } |
| 1509 | return true; |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1510 | } |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1511 | } |
Aart Bik | 3b2a595 | 2018-03-05 13:55:28 -0800 | [diff] [blame] | 1512 | } else if (instruction->IsAbs()) { |
| 1513 | // Deal with vector restrictions. |
| 1514 | HInstruction* opa = instruction->InputAt(0); |
| 1515 | HInstruction* r = opa; |
| 1516 | bool is_unsigned = false; |
| 1517 | if (HasVectorRestrictions(restrictions, kNoAbs)) { |
| 1518 | return false; |
| 1519 | } else if (HasVectorRestrictions(restrictions, kNoHiBits) && |
| 1520 | (!IsNarrowerOperand(opa, type, &r, &is_unsigned) || is_unsigned)) { |
| 1521 | return false; // reject, unless operand is sign-extension narrower |
| 1522 | } |
| 1523 | // Accept ABS(x) for vectorizable operand. |
| 1524 | DCHECK(r != nullptr); |
| 1525 | if (generate_code && vector_mode_ != kVector) { // de-idiom |
| 1526 | r = opa; |
| 1527 | } |
| 1528 | if (VectorizeUse(node, r, generate_code, type, restrictions)) { |
| 1529 | if (generate_code) { |
| 1530 | GenerateVecOp(instruction, |
| 1531 | vector_map_->Get(r), |
| 1532 | nullptr, |
| 1533 | HVecOperation::ToProperType(type, is_unsigned)); |
| 1534 | } |
| 1535 | return true; |
| 1536 | } |
Aart Bik | 281c681 | 2016-08-26 11:31:48 -0700 | [diff] [blame] | 1537 | } |
Aart Bik | 6b69e0a | 2017-01-11 10:20:43 -0800 | [diff] [blame] | 1538 | return false; |
Aart Bik | 281c681 | 2016-08-26 11:31:48 -0700 | [diff] [blame] | 1539 | } |
| 1540 | |
Aart Bik | 38a3f21 | 2017-10-20 17:02:21 -0700 | [diff] [blame] | 1541 | uint32_t HLoopOptimization::GetVectorSizeInBytes() { |
Artem Serov | c8150b5 | 2019-07-31 18:28:00 +0100 | [diff] [blame] | 1542 | if (kIsDebugBuild) { |
| 1543 | InstructionSet isa = compiler_options_->GetInstructionSet(); |
| 1544 | // TODO: Remove this check when there are no implicit assumptions on the SIMD reg size. |
| 1545 | DCHECK_EQ(simd_register_size_, (isa == InstructionSet::kArm || isa == InstructionSet::kThumb2) |
| 1546 | ? 8u |
| 1547 | : 16u); |
Aart Bik | 38a3f21 | 2017-10-20 17:02:21 -0700 | [diff] [blame] | 1548 | } |
Artem Serov | c8150b5 | 2019-07-31 18:28:00 +0100 | [diff] [blame] | 1549 | |
| 1550 | return simd_register_size_; |
Aart Bik | 38a3f21 | 2017-10-20 17:02:21 -0700 | [diff] [blame] | 1551 | } |
| 1552 | |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1553 | bool HLoopOptimization::TrySetVectorType(DataType::Type type, uint64_t* restrictions) { |
Vladimir Marko | a043111 | 2018-06-25 09:32:54 +0100 | [diff] [blame] | 1554 | const InstructionSetFeatures* features = compiler_options_->GetInstructionSetFeatures(); |
| 1555 | switch (compiler_options_->GetInstructionSet()) { |
Vladimir Marko | 33bff25 | 2017-11-01 14:35:42 +0000 | [diff] [blame] | 1556 | case InstructionSet::kArm: |
| 1557 | case InstructionSet::kThumb2: |
Artem Serov | 8f7c410 | 2017-06-21 11:21:37 +0100 | [diff] [blame] | 1558 | // Allow vectorization for all ARM devices, because Android assumes that |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 1559 | // ARM 32-bit always supports advanced SIMD (64-bit SIMD). |
Artem Serov | 8f7c410 | 2017-06-21 11:21:37 +0100 | [diff] [blame] | 1560 | switch (type) { |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1561 | case DataType::Type::kBool: |
Vladimir Marko | d5d2f2c | 2017-09-26 12:37:26 +0100 | [diff] [blame] | 1562 | case DataType::Type::kUint8: |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1563 | case DataType::Type::kInt8: |
Artem Serov | aaac0e3 | 2018-08-07 00:52:22 +0100 | [diff] [blame] | 1564 | *restrictions |= kNoDiv | kNoReduction | kNoDotProd; |
Artem Serov | c8150b5 | 2019-07-31 18:28:00 +0100 | [diff] [blame] | 1565 | return TrySetVectorLength(type, 8); |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1566 | case DataType::Type::kUint16: |
| 1567 | case DataType::Type::kInt16: |
Artem Serov | aaac0e3 | 2018-08-07 00:52:22 +0100 | [diff] [blame] | 1568 | *restrictions |= kNoDiv | kNoStringCharAt | kNoReduction | kNoDotProd; |
Artem Serov | c8150b5 | 2019-07-31 18:28:00 +0100 | [diff] [blame] | 1569 | return TrySetVectorLength(type, 4); |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1570 | case DataType::Type::kInt32: |
Artem Serov | 6e9b137 | 2017-10-05 16:48:30 +0100 | [diff] [blame] | 1571 | *restrictions |= kNoDiv | kNoWideSAD; |
Artem Serov | c8150b5 | 2019-07-31 18:28:00 +0100 | [diff] [blame] | 1572 | return TrySetVectorLength(type, 2); |
Artem Serov | 8f7c410 | 2017-06-21 11:21:37 +0100 | [diff] [blame] | 1573 | default: |
| 1574 | break; |
| 1575 | } |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1576 | return false; |
Vladimir Marko | 33bff25 | 2017-11-01 14:35:42 +0000 | [diff] [blame] | 1577 | case InstructionSet::kArm64: |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1578 | // Allow vectorization for all ARM devices, because Android assumes that |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 1579 | // ARMv8 AArch64 always supports advanced SIMD (128-bit SIMD). |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1580 | switch (type) { |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1581 | case DataType::Type::kBool: |
Vladimir Marko | d5d2f2c | 2017-09-26 12:37:26 +0100 | [diff] [blame] | 1582 | case DataType::Type::kUint8: |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1583 | case DataType::Type::kInt8: |
Aart Bik | dbbac8f | 2017-09-01 13:06:08 -0700 | [diff] [blame] | 1584 | *restrictions |= kNoDiv; |
Artem Serov | c8150b5 | 2019-07-31 18:28:00 +0100 | [diff] [blame] | 1585 | return TrySetVectorLength(type, 16); |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1586 | case DataType::Type::kUint16: |
| 1587 | case DataType::Type::kInt16: |
Aart Bik | dbbac8f | 2017-09-01 13:06:08 -0700 | [diff] [blame] | 1588 | *restrictions |= kNoDiv; |
Artem Serov | c8150b5 | 2019-07-31 18:28:00 +0100 | [diff] [blame] | 1589 | return TrySetVectorLength(type, 8); |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1590 | case DataType::Type::kInt32: |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1591 | *restrictions |= kNoDiv; |
Artem Serov | c8150b5 | 2019-07-31 18:28:00 +0100 | [diff] [blame] | 1592 | return TrySetVectorLength(type, 4); |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1593 | case DataType::Type::kInt64: |
Aart Bik | 3f08e9b | 2018-05-01 13:42:03 -0700 | [diff] [blame] | 1594 | *restrictions |= kNoDiv | kNoMul; |
Artem Serov | c8150b5 | 2019-07-31 18:28:00 +0100 | [diff] [blame] | 1595 | return TrySetVectorLength(type, 2); |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1596 | case DataType::Type::kFloat32: |
Aart Bik | 0148de4 | 2017-09-05 09:25:01 -0700 | [diff] [blame] | 1597 | *restrictions |= kNoReduction; |
Artem Serov | c8150b5 | 2019-07-31 18:28:00 +0100 | [diff] [blame] | 1598 | return TrySetVectorLength(type, 4); |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1599 | case DataType::Type::kFloat64: |
Aart Bik | 0148de4 | 2017-09-05 09:25:01 -0700 | [diff] [blame] | 1600 | *restrictions |= kNoReduction; |
Artem Serov | c8150b5 | 2019-07-31 18:28:00 +0100 | [diff] [blame] | 1601 | return TrySetVectorLength(type, 2); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1602 | default: |
| 1603 | return false; |
| 1604 | } |
Vladimir Marko | 33bff25 | 2017-11-01 14:35:42 +0000 | [diff] [blame] | 1605 | case InstructionSet::kX86: |
| 1606 | case InstructionSet::kX86_64: |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 1607 | // Allow vectorization for SSE4.1-enabled X86 devices only (128-bit SIMD). |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1608 | if (features->AsX86InstructionSetFeatures()->HasSSE4_1()) { |
| 1609 | switch (type) { |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1610 | case DataType::Type::kBool: |
Vladimir Marko | d5d2f2c | 2017-09-26 12:37:26 +0100 | [diff] [blame] | 1611 | case DataType::Type::kUint8: |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1612 | case DataType::Type::kInt8: |
Artem Serov | aaac0e3 | 2018-08-07 00:52:22 +0100 | [diff] [blame] | 1613 | *restrictions |= kNoMul | |
| 1614 | kNoDiv | |
| 1615 | kNoShift | |
| 1616 | kNoAbs | |
| 1617 | kNoSignedHAdd | |
| 1618 | kNoUnroundedHAdd | |
| 1619 | kNoSAD | |
| 1620 | kNoDotProd; |
Artem Serov | c8150b5 | 2019-07-31 18:28:00 +0100 | [diff] [blame] | 1621 | return TrySetVectorLength(type, 16); |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1622 | case DataType::Type::kUint16: |
Alex Light | 43f2f75 | 2019-12-04 17:48:45 +0000 | [diff] [blame] | 1623 | *restrictions |= kNoDiv | |
| 1624 | kNoAbs | |
| 1625 | kNoSignedHAdd | |
| 1626 | kNoUnroundedHAdd | |
| 1627 | kNoSAD | |
| 1628 | kNoDotProd; |
Artem Serov | c8150b5 | 2019-07-31 18:28:00 +0100 | [diff] [blame] | 1629 | return TrySetVectorLength(type, 8); |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1630 | case DataType::Type::kInt16: |
Artem Serov | aaac0e3 | 2018-08-07 00:52:22 +0100 | [diff] [blame] | 1631 | *restrictions |= kNoDiv | |
| 1632 | kNoAbs | |
| 1633 | kNoSignedHAdd | |
| 1634 | kNoUnroundedHAdd | |
Alex Light | 43f2f75 | 2019-12-04 17:48:45 +0000 | [diff] [blame] | 1635 | kNoSAD; |
Artem Serov | c8150b5 | 2019-07-31 18:28:00 +0100 | [diff] [blame] | 1636 | return TrySetVectorLength(type, 8); |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1637 | case DataType::Type::kInt32: |
Aart Bik | dbbac8f | 2017-09-01 13:06:08 -0700 | [diff] [blame] | 1638 | *restrictions |= kNoDiv | kNoSAD; |
Artem Serov | c8150b5 | 2019-07-31 18:28:00 +0100 | [diff] [blame] | 1639 | return TrySetVectorLength(type, 4); |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1640 | case DataType::Type::kInt64: |
Aart Bik | 3f08e9b | 2018-05-01 13:42:03 -0700 | [diff] [blame] | 1641 | *restrictions |= kNoMul | kNoDiv | kNoShr | kNoAbs | kNoSAD; |
Artem Serov | c8150b5 | 2019-07-31 18:28:00 +0100 | [diff] [blame] | 1642 | return TrySetVectorLength(type, 2); |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1643 | case DataType::Type::kFloat32: |
Aart Bik | 3f08e9b | 2018-05-01 13:42:03 -0700 | [diff] [blame] | 1644 | *restrictions |= kNoReduction; |
Artem Serov | c8150b5 | 2019-07-31 18:28:00 +0100 | [diff] [blame] | 1645 | return TrySetVectorLength(type, 4); |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1646 | case DataType::Type::kFloat64: |
Aart Bik | 3f08e9b | 2018-05-01 13:42:03 -0700 | [diff] [blame] | 1647 | *restrictions |= kNoReduction; |
Artem Serov | c8150b5 | 2019-07-31 18:28:00 +0100 | [diff] [blame] | 1648 | return TrySetVectorLength(type, 2); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1649 | default: |
| 1650 | break; |
| 1651 | } // switch type |
| 1652 | } |
| 1653 | return false; |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1654 | default: |
| 1655 | return false; |
| 1656 | } // switch instruction set |
| 1657 | } |
| 1658 | |
Artem Serov | c8150b5 | 2019-07-31 18:28:00 +0100 | [diff] [blame] | 1659 | bool HLoopOptimization::TrySetVectorLengthImpl(uint32_t length) { |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1660 | DCHECK(IsPowerOfTwo(length) && length >= 2u); |
| 1661 | // First time set? |
| 1662 | if (vector_length_ == 0) { |
| 1663 | vector_length_ = length; |
| 1664 | } |
| 1665 | // Different types are acceptable within a loop-body, as long as all the corresponding vector |
| 1666 | // lengths match exactly to obtain a uniform traversal through the vector iteration space |
| 1667 | // (idiomatic exceptions to this rule can be handled by further unrolling sub-expressions). |
| 1668 | return vector_length_ == length; |
| 1669 | } |
| 1670 | |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1671 | void HLoopOptimization::GenerateVecInv(HInstruction* org, DataType::Type type) { |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1672 | if (vector_map_->find(org) == vector_map_->end()) { |
| 1673 | // In scalar code, just use a self pass-through for scalar invariants |
| 1674 | // (viz. expression remains itself). |
| 1675 | if (vector_mode_ == kSequential) { |
| 1676 | vector_map_->Put(org, org); |
| 1677 | return; |
| 1678 | } |
| 1679 | // In vector code, explicit scalar expansion is needed. |
Aart Bik | 0148de4 | 2017-09-05 09:25:01 -0700 | [diff] [blame] | 1680 | HInstruction* vector = nullptr; |
| 1681 | auto it = vector_permanent_map_->find(org); |
| 1682 | if (it != vector_permanent_map_->end()) { |
| 1683 | vector = it->second; // reuse during unrolling |
| 1684 | } else { |
Aart Bik | dbbac8f | 2017-09-01 13:06:08 -0700 | [diff] [blame] | 1685 | // Generates ReplicateScalar( (optional_type_conv) org ). |
| 1686 | HInstruction* input = org; |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1687 | DataType::Type input_type = input->GetType(); |
| 1688 | if (type != input_type && (type == DataType::Type::kInt64 || |
| 1689 | input_type == DataType::Type::kInt64)) { |
Aart Bik | dbbac8f | 2017-09-01 13:06:08 -0700 | [diff] [blame] | 1690 | input = Insert(vector_preheader_, |
| 1691 | new (global_allocator_) HTypeConversion(type, input, kNoDexPc)); |
| 1692 | } |
| 1693 | vector = new (global_allocator_) |
Aart Bik | 46b6dbc | 2017-10-03 11:37:37 -0700 | [diff] [blame] | 1694 | HVecReplicateScalar(global_allocator_, input, type, vector_length_, kNoDexPc); |
Aart Bik | 0148de4 | 2017-09-05 09:25:01 -0700 | [diff] [blame] | 1695 | vector_permanent_map_->Put(org, Insert(vector_preheader_, vector)); |
| 1696 | } |
| 1697 | vector_map_->Put(org, vector); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1698 | } |
| 1699 | } |
| 1700 | |
| 1701 | void HLoopOptimization::GenerateVecSub(HInstruction* org, HInstruction* offset) { |
| 1702 | if (vector_map_->find(org) == vector_map_->end()) { |
Aart Bik | 14a68b4 | 2017-06-08 14:06:58 -0700 | [diff] [blame] | 1703 | HInstruction* subscript = vector_index_; |
Aart Bik | 37dc4df | 2017-06-28 14:08:00 -0700 | [diff] [blame] | 1704 | int64_t value = 0; |
| 1705 | if (!IsInt64AndGet(offset, &value) || value != 0) { |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1706 | subscript = new (global_allocator_) HAdd(DataType::Type::kInt32, subscript, offset); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1707 | if (org->IsPhi()) { |
| 1708 | Insert(vector_body_, subscript); // lacks layout placeholder |
| 1709 | } |
| 1710 | } |
| 1711 | vector_map_->Put(org, subscript); |
| 1712 | } |
| 1713 | } |
| 1714 | |
// Generates the memory operation that replaces the original ArrayGet/ArraySet
// 'org' in the transformed loop:
//   opa:    index operand,
//   opb:    value to store (nullptr selects a load),
//   offset: subscript offset, used for alignment computation,
//   type:   packed data type of the access.
// In kVector mode a HVecStore/HVecLoad is created with its alignment set; in
// kSequential mode a scalar HArraySet/HArrayGet clone is created. The new
// instruction is recorded in vector_map_ under 'org'.
void HLoopOptimization::GenerateVecMem(HInstruction* org,
                                       HInstruction* opa,
                                       HInstruction* opb,
                                       HInstruction* offset,
                                       DataType::Type type) {
  uint32_t dex_pc = org->GetDexPc();
  HInstruction* vector = nullptr;
  if (vector_mode_ == kVector) {
    // Vector store or load.
    bool is_string_char_at = false;
    HInstruction* base = org->InputAt(0);
    if (opb != nullptr) {
      vector = new (global_allocator_) HVecStore(
          global_allocator_, base, opa, opb, type, org->GetSideEffects(), vector_length_, dex_pc);
    } else {
      is_string_char_at = org->AsArrayGet()->IsStringCharAt();
      vector = new (global_allocator_) HVecLoad(global_allocator_,
                                                base,
                                                opa,
                                                type,
                                                org->GetSideEffects(),
                                                vector_length_,
                                                is_string_char_at,
                                                dex_pc);
    }
    // Known (forced/adjusted/original) alignment?
    if (vector_dynamic_peeling_candidate_ != nullptr) {
      // Dynamic peeling guarantees full SIMD alignment only for accesses that
      // match the peeling candidate's offset, access size and CharAt-ness.
      if (vector_dynamic_peeling_candidate_->offset == offset &&  // TODO: diffs too?
          DataType::Size(vector_dynamic_peeling_candidate_->type) == DataType::Size(type) &&
          vector_dynamic_peeling_candidate_->is_string_char_at == is_string_char_at) {
        vector->AsVecMemoryOperation()->SetAlignment(  // forced
            Alignment(GetVectorSizeInBytes(), 0));
      }
    } else {
      vector->AsVecMemoryOperation()->SetAlignment(  // adjusted/original
          ComputeAlignment(offset, type, is_string_char_at, vector_static_peeling_factor_));
    }
  } else {
    // Scalar store or load.
    DCHECK(vector_mode_ == kSequential);
    if (opb != nullptr) {
      DataType::Type component_type = org->AsArraySet()->GetComponentType();
      vector = new (global_allocator_) HArraySet(
          org->InputAt(0), opa, opb, component_type, org->GetSideEffects(), dex_pc);
    } else {
      bool is_string_char_at = org->AsArrayGet()->IsStringCharAt();
      vector = new (global_allocator_) HArrayGet(
          org->InputAt(0), opa, org->GetType(), org->GetSideEffects(), dex_pc, is_string_char_at);
    }
  }
  vector_map_->Put(org, vector);
}
| 1767 | |
Aart Bik | 0148de4 | 2017-09-05 09:25:01 -0700 | [diff] [blame] | 1768 | void HLoopOptimization::GenerateVecReductionPhi(HPhi* phi) { |
| 1769 | DCHECK(reductions_->find(phi) != reductions_->end()); |
| 1770 | DCHECK(reductions_->Get(phi->InputAt(1)) == phi); |
| 1771 | HInstruction* vector = nullptr; |
| 1772 | if (vector_mode_ == kSequential) { |
| 1773 | HPhi* new_phi = new (global_allocator_) HPhi( |
| 1774 | global_allocator_, kNoRegNumber, 0, phi->GetType()); |
| 1775 | vector_header_->AddPhi(new_phi); |
| 1776 | vector = new_phi; |
| 1777 | } else { |
| 1778 | // Link vector reduction back to prior unrolled update, or a first phi. |
| 1779 | auto it = vector_permanent_map_->find(phi); |
| 1780 | if (it != vector_permanent_map_->end()) { |
| 1781 | vector = it->second; |
| 1782 | } else { |
| 1783 | HPhi* new_phi = new (global_allocator_) HPhi( |
| 1784 | global_allocator_, kNoRegNumber, 0, HVecOperation::kSIMDType); |
| 1785 | vector_header_->AddPhi(new_phi); |
| 1786 | vector = new_phi; |
| 1787 | } |
| 1788 | } |
| 1789 | vector_map_->Put(phi, vector); |
| 1790 | } |
| 1791 | |
// Wires up the inputs of the replacement phi generated for reduction 'phi':
// input 0 becomes a suitably prepared initial value and input 1 the last
// reduction update 'reduction'. Also advances reductions_ so that the next
// unrolled iteration links against the new phi.
void HLoopOptimization::GenerateVecReductionPhiInputs(HPhi* phi, HInstruction* reduction) {
  HInstruction* new_phi = vector_map_->Get(phi);
  HInstruction* new_init = reductions_->Get(phi);
  HInstruction* new_red = vector_map_->Get(reduction);
  // Link unrolled vector loop back to new phi: follow the permanent-map chain
  // of unrolled updates until the first phi is reached.
  for (; !new_phi->IsPhi(); new_phi = vector_permanent_map_->Get(new_phi)) {
    DCHECK(new_phi->IsVecOperation());
  }
  // Prepare the new initialization.
  if (vector_mode_ == kVector) {
    // Generate a [initial, 0, .., 0] vector for add or
    // a [initial, initial, .., initial] vector for min/max.
    HVecOperation* red_vector = new_red->AsVecOperation();
    HVecReduce::ReductionKind kind = GetReductionKind(red_vector);
    uint32_t vector_length = red_vector->GetVectorLength();
    DataType::Type type = red_vector->GetPackedType();
    if (kind == HVecReduce::ReductionKind::kSum) {
      // Sum: only lane 0 carries the initial value; the rest start at zero.
      new_init = Insert(vector_preheader_,
                        new (global_allocator_) HVecSetScalars(global_allocator_,
                                                               &new_init,
                                                               type,
                                                               vector_length,
                                                               1,
                                                               kNoDexPc));
    } else {
      // Min/max: every lane must start at the initial value.
      new_init = Insert(vector_preheader_,
                        new (global_allocator_) HVecReplicateScalar(global_allocator_,
                                                                    new_init,
                                                                    type,
                                                                    vector_length,
                                                                    kNoDexPc));
    }
  } else {
    // Scalar mode: reduce a prior SIMD-valued initial value to a scalar first.
    new_init = ReduceAndExtractIfNeeded(new_init);
  }
  // Set the phi inputs.
  DCHECK(new_phi->IsPhi());
  new_phi->AsPhi()->AddInput(new_init);
  new_phi->AsPhi()->AddInput(new_red);
  // New feed value for next phi (safe mutation in iteration).
  reductions_->find(phi)->second = new_phi;
}
| 1834 | |
| 1835 | HInstruction* HLoopOptimization::ReduceAndExtractIfNeeded(HInstruction* instruction) { |
| 1836 | if (instruction->IsPhi()) { |
| 1837 | HInstruction* input = instruction->InputAt(1); |
Aart Bik | 2dd7b67 | 2017-12-07 11:11:22 -0800 | [diff] [blame] | 1838 | if (HVecOperation::ReturnsSIMDValue(input)) { |
| 1839 | DCHECK(!input->IsPhi()); |
Aart Bik | dbbac8f | 2017-09-01 13:06:08 -0700 | [diff] [blame] | 1840 | HVecOperation* input_vector = input->AsVecOperation(); |
Aart Bik | 38a3f21 | 2017-10-20 17:02:21 -0700 | [diff] [blame] | 1841 | uint32_t vector_length = input_vector->GetVectorLength(); |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1842 | DataType::Type type = input_vector->GetPackedType(); |
Aart Bik | dbbac8f | 2017-09-01 13:06:08 -0700 | [diff] [blame] | 1843 | HVecReduce::ReductionKind kind = GetReductionKind(input_vector); |
Aart Bik | 0148de4 | 2017-09-05 09:25:01 -0700 | [diff] [blame] | 1844 | HBasicBlock* exit = instruction->GetBlock()->GetSuccessors()[0]; |
| 1845 | // Generate a vector reduction and scalar extract |
| 1846 | // x = REDUCE( [x_1, .., x_n] ) |
| 1847 | // y = x_1 |
| 1848 | // along the exit of the defining loop. |
Aart Bik | 0148de4 | 2017-09-05 09:25:01 -0700 | [diff] [blame] | 1849 | HInstruction* reduce = new (global_allocator_) HVecReduce( |
Aart Bik | 46b6dbc | 2017-10-03 11:37:37 -0700 | [diff] [blame] | 1850 | global_allocator_, instruction, type, vector_length, kind, kNoDexPc); |
Aart Bik | 0148de4 | 2017-09-05 09:25:01 -0700 | [diff] [blame] | 1851 | exit->InsertInstructionBefore(reduce, exit->GetFirstInstruction()); |
| 1852 | instruction = new (global_allocator_) HVecExtractScalar( |
Aart Bik | 46b6dbc | 2017-10-03 11:37:37 -0700 | [diff] [blame] | 1853 | global_allocator_, reduce, type, vector_length, 0, kNoDexPc); |
Aart Bik | 0148de4 | 2017-09-05 09:25:01 -0700 | [diff] [blame] | 1854 | exit->InsertInstructionAfter(instruction, reduce); |
| 1855 | } |
| 1856 | } |
| 1857 | return instruction; |
| 1858 | } |
| 1859 | |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1860 | #define GENERATE_VEC(x, y) \ |
| 1861 | if (vector_mode_ == kVector) { \ |
| 1862 | vector = (x); \ |
| 1863 | } else { \ |
| 1864 | DCHECK(vector_mode_ == kSequential); \ |
| 1865 | vector = (y); \ |
| 1866 | } \ |
| 1867 | break; |
| 1868 | |
| 1869 | void HLoopOptimization::GenerateVecOp(HInstruction* org, |
| 1870 | HInstruction* opa, |
| 1871 | HInstruction* opb, |
Aart Bik | 3f08e9b | 2018-05-01 13:42:03 -0700 | [diff] [blame] | 1872 | DataType::Type type) { |
Aart Bik | 46b6dbc | 2017-10-03 11:37:37 -0700 | [diff] [blame] | 1873 | uint32_t dex_pc = org->GetDexPc(); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1874 | HInstruction* vector = nullptr; |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1875 | DataType::Type org_type = org->GetType(); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1876 | switch (org->GetKind()) { |
| 1877 | case HInstruction::kNeg: |
| 1878 | DCHECK(opb == nullptr); |
| 1879 | GENERATE_VEC( |
Aart Bik | 46b6dbc | 2017-10-03 11:37:37 -0700 | [diff] [blame] | 1880 | new (global_allocator_) HVecNeg(global_allocator_, opa, type, vector_length_, dex_pc), |
| 1881 | new (global_allocator_) HNeg(org_type, opa, dex_pc)); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1882 | case HInstruction::kNot: |
| 1883 | DCHECK(opb == nullptr); |
| 1884 | GENERATE_VEC( |
Aart Bik | 46b6dbc | 2017-10-03 11:37:37 -0700 | [diff] [blame] | 1885 | new (global_allocator_) HVecNot(global_allocator_, opa, type, vector_length_, dex_pc), |
| 1886 | new (global_allocator_) HNot(org_type, opa, dex_pc)); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1887 | case HInstruction::kBooleanNot: |
| 1888 | DCHECK(opb == nullptr); |
| 1889 | GENERATE_VEC( |
Aart Bik | 46b6dbc | 2017-10-03 11:37:37 -0700 | [diff] [blame] | 1890 | new (global_allocator_) HVecNot(global_allocator_, opa, type, vector_length_, dex_pc), |
| 1891 | new (global_allocator_) HBooleanNot(opa, dex_pc)); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1892 | case HInstruction::kTypeConversion: |
| 1893 | DCHECK(opb == nullptr); |
| 1894 | GENERATE_VEC( |
Aart Bik | 46b6dbc | 2017-10-03 11:37:37 -0700 | [diff] [blame] | 1895 | new (global_allocator_) HVecCnv(global_allocator_, opa, type, vector_length_, dex_pc), |
| 1896 | new (global_allocator_) HTypeConversion(org_type, opa, dex_pc)); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1897 | case HInstruction::kAdd: |
| 1898 | GENERATE_VEC( |
Aart Bik | 46b6dbc | 2017-10-03 11:37:37 -0700 | [diff] [blame] | 1899 | new (global_allocator_) HVecAdd(global_allocator_, opa, opb, type, vector_length_, dex_pc), |
| 1900 | new (global_allocator_) HAdd(org_type, opa, opb, dex_pc)); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1901 | case HInstruction::kSub: |
| 1902 | GENERATE_VEC( |
Aart Bik | 46b6dbc | 2017-10-03 11:37:37 -0700 | [diff] [blame] | 1903 | new (global_allocator_) HVecSub(global_allocator_, opa, opb, type, vector_length_, dex_pc), |
| 1904 | new (global_allocator_) HSub(org_type, opa, opb, dex_pc)); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1905 | case HInstruction::kMul: |
| 1906 | GENERATE_VEC( |
Aart Bik | 46b6dbc | 2017-10-03 11:37:37 -0700 | [diff] [blame] | 1907 | new (global_allocator_) HVecMul(global_allocator_, opa, opb, type, vector_length_, dex_pc), |
| 1908 | new (global_allocator_) HMul(org_type, opa, opb, dex_pc)); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1909 | case HInstruction::kDiv: |
| 1910 | GENERATE_VEC( |
Aart Bik | 46b6dbc | 2017-10-03 11:37:37 -0700 | [diff] [blame] | 1911 | new (global_allocator_) HVecDiv(global_allocator_, opa, opb, type, vector_length_, dex_pc), |
| 1912 | new (global_allocator_) HDiv(org_type, opa, opb, dex_pc)); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1913 | case HInstruction::kAnd: |
| 1914 | GENERATE_VEC( |
Aart Bik | 46b6dbc | 2017-10-03 11:37:37 -0700 | [diff] [blame] | 1915 | new (global_allocator_) HVecAnd(global_allocator_, opa, opb, type, vector_length_, dex_pc), |
| 1916 | new (global_allocator_) HAnd(org_type, opa, opb, dex_pc)); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1917 | case HInstruction::kOr: |
| 1918 | GENERATE_VEC( |
Aart Bik | 46b6dbc | 2017-10-03 11:37:37 -0700 | [diff] [blame] | 1919 | new (global_allocator_) HVecOr(global_allocator_, opa, opb, type, vector_length_, dex_pc), |
| 1920 | new (global_allocator_) HOr(org_type, opa, opb, dex_pc)); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1921 | case HInstruction::kXor: |
| 1922 | GENERATE_VEC( |
Aart Bik | 46b6dbc | 2017-10-03 11:37:37 -0700 | [diff] [blame] | 1923 | new (global_allocator_) HVecXor(global_allocator_, opa, opb, type, vector_length_, dex_pc), |
| 1924 | new (global_allocator_) HXor(org_type, opa, opb, dex_pc)); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1925 | case HInstruction::kShl: |
| 1926 | GENERATE_VEC( |
Aart Bik | 46b6dbc | 2017-10-03 11:37:37 -0700 | [diff] [blame] | 1927 | new (global_allocator_) HVecShl(global_allocator_, opa, opb, type, vector_length_, dex_pc), |
| 1928 | new (global_allocator_) HShl(org_type, opa, opb, dex_pc)); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1929 | case HInstruction::kShr: |
| 1930 | GENERATE_VEC( |
Aart Bik | 46b6dbc | 2017-10-03 11:37:37 -0700 | [diff] [blame] | 1931 | new (global_allocator_) HVecShr(global_allocator_, opa, opb, type, vector_length_, dex_pc), |
| 1932 | new (global_allocator_) HShr(org_type, opa, opb, dex_pc)); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1933 | case HInstruction::kUShr: |
| 1934 | GENERATE_VEC( |
Aart Bik | 46b6dbc | 2017-10-03 11:37:37 -0700 | [diff] [blame] | 1935 | new (global_allocator_) HVecUShr(global_allocator_, opa, opb, type, vector_length_, dex_pc), |
| 1936 | new (global_allocator_) HUShr(org_type, opa, opb, dex_pc)); |
Aart Bik | 3b2a595 | 2018-03-05 13:55:28 -0800 | [diff] [blame] | 1937 | case HInstruction::kAbs: |
| 1938 | DCHECK(opb == nullptr); |
| 1939 | GENERATE_VEC( |
| 1940 | new (global_allocator_) HVecAbs(global_allocator_, opa, type, vector_length_, dex_pc), |
| 1941 | new (global_allocator_) HAbs(org_type, opa, dex_pc)); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 1942 | default: |
| 1943 | break; |
| 1944 | } // switch |
| 1945 | CHECK(vector != nullptr) << "Unsupported SIMD operator"; |
| 1946 | vector_map_->Put(org, vector); |
| 1947 | } |
| 1948 | |
| 1949 | #undef GENERATE_VEC |
| 1950 | |
| 1951 | // |
Aart Bik | f3e61ee | 2017-04-12 17:09:20 -0700 | [diff] [blame] | 1952 | // Vectorization idioms. |
| 1953 | // |
| 1954 | |
| 1955 | // Method recognizes the following idioms: |
Aart Bik | dbbac8f | 2017-09-01 13:06:08 -0700 | [diff] [blame] | 1956 | // rounding halving add (a + b + 1) >> 1 for unsigned/signed operands a, b |
| 1957 | // truncated halving add (a + b) >> 1 for unsigned/signed operands a, b |
Aart Bik | f3e61ee | 2017-04-12 17:09:20 -0700 | [diff] [blame] | 1958 | // Provided that the operands are promoted to a wider form to do the arithmetic and |
| 1959 | // then cast back to narrower form, the idioms can be mapped into efficient SIMD |
| 1960 | // implementation that operates directly in narrower form (plus one extra bit). |
| 1961 | // TODO: current version recognizes implicit byte/short/char widening only; |
| 1962 | // explicit widening from int to long could be added later. |
bool HLoopOptimization::VectorizeHalvingAddIdiom(LoopNode* node,
                                                 HInstruction* instruction,
                                                 bool generate_code,
                                                 DataType::Type type,
                                                 uint64_t restrictions) {
  // Test for top level arithmetic shift right x >> 1 or logical shift right x >>> 1
  // (note whether the sign bit in wider precision is shifted in has no effect
  // on the narrow precision computed by the idiom).
  if ((instruction->IsShr() ||
       instruction->IsUShr()) &&
      IsInt64Value(instruction->InputAt(1), 1)) {
    // Test for (a + b + c) >> 1 for optional constant c.
    HInstruction* a = nullptr;
    HInstruction* b = nullptr;
    int64_t c = 0;
    if (IsAddConst2(graph_, instruction->InputAt(0), /*out*/ &a, /*out*/ &b, /*out*/ &c)) {
      // Accept c == 1 (rounded) or c == 0 (not rounded); any other constant
      // does not match the halving-add idiom.
      bool is_rounded = false;
      if (c == 1) {
        is_rounded = true;
      } else if (c != 0) {
        return false;
      }
      // Accept consistent zero or sign extension on operands a and b, i.e. both
      // must be the same narrower type (r, s) widened in the same (un)signed way.
      HInstruction* r = nullptr;
      HInstruction* s = nullptr;
      bool is_unsigned = false;
      if (!IsNarrowerOperands(a, b, type, &r, &s, &is_unsigned)) {
        return false;
      }
      // Deal with vector restrictions: the target may lack a signed and/or an
      // unrounded halving-add instruction.
      if ((!is_unsigned && HasVectorRestrictions(restrictions, kNoSignedHAdd)) ||
          (!is_rounded && HasVectorRestrictions(restrictions, kNoUnroundedHAdd))) {
        return false;
      }
      // Accept recognized halving add for vectorizable operands. Vectorized code uses the
      // shorthand idiomatic operation. Sequential code uses the original scalar expressions.
      DCHECK(r != nullptr && s != nullptr);
      if (generate_code && vector_mode_ != kVector) {  // de-idiom
        // For the scalar cleanup loop, fall back to the original wide shift operands
        // so the sequential code reproduces the exact source-level computation.
        r = instruction->InputAt(0);
        s = instruction->InputAt(1);
      }
      if (VectorizeUse(node, r, generate_code, type, restrictions) &&
          VectorizeUse(node, s, generate_code, type, restrictions)) {
        if (generate_code) {
          if (vector_mode_ == kVector) {
            // Map the whole (a + b + c) >> 1 expression to one HVecHalvingAdd node.
            vector_map_->Put(instruction, new (global_allocator_) HVecHalvingAdd(
                global_allocator_,
                vector_map_->Get(r),
                vector_map_->Get(s),
                HVecOperation::ToProperType(type, is_unsigned),
                vector_length_,
                is_rounded,
                kNoDexPc));
            MaybeRecordStat(stats_, MethodCompilationStat::kLoopVectorizedIdiom);
          } else {
            GenerateVecOp(instruction, vector_map_->Get(r), vector_map_->Get(s), type);
          }
        }
        return true;
      }
    }
  }
  return false;
}
| 2028 | |
Aart Bik | dbbac8f | 2017-09-01 13:06:08 -0700 | [diff] [blame] | 2029 | // Method recognizes the following idiom: |
| 2030 | // q += ABS(a - b) for signed operands a, b |
| 2031 | // Provided that the operands have the same type or are promoted to a wider form. |
| 2032 | // Since this may involve a vector length change, the idiom is handled by going directly |
// to a sad-accumulate node (rather than relying on combining finer-grained nodes later).
| 2034 | // TODO: unsigned SAD too? |
bool HLoopOptimization::VectorizeSADIdiom(LoopNode* node,
                                          HInstruction* instruction,
                                          bool generate_code,
                                          DataType::Type reduction_type,
                                          uint64_t restrictions) {
  // Filter integral "q += ABS(a - b);" reduction, where ABS and SUB
  // are done in the same precision (either int or long).
  if (!instruction->IsAdd() ||
      (reduction_type != DataType::Type::kInt32 && reduction_type != DataType::Type::kInt64)) {
    return false;
  }
  // Expect the accumulator as the first input and ABS(a - b) as the second.
  HInstruction* acc = instruction->InputAt(0);
  HInstruction* abs = instruction->InputAt(1);
  HInstruction* a = nullptr;
  HInstruction* b = nullptr;
  if (abs->IsAbs() &&
      abs->GetType() == reduction_type &&
      IsSubConst2(graph_, abs->InputAt(0), /*out*/ &a, /*out*/ &b)) {
    DCHECK(a != nullptr && b != nullptr);
  } else {
    return false;
  }
  // Accept same-type or consistent sign extension for narrower-type on operands a and b.
  // The same-type or narrower operands are called r (a or lower) and s (b or lower).
  // We inspect the operands carefully to pick the most suited type.
  HInstruction* r = a;
  HInstruction* s = b;
  bool is_unsigned = false;
  DataType::Type sub_type = GetNarrowerType(a, b);
  if (reduction_type != sub_type &&
      (!IsNarrowerOperands(a, b, sub_type, &r, &s, &is_unsigned) || is_unsigned)) {
    return false;
  }
  // Try same/narrower type and deal with vector restrictions: the target must
  // support SAD at sub_type, and wide SAD when the accumulator is wider.
  if (!TrySetVectorType(sub_type, &restrictions) ||
      HasVectorRestrictions(restrictions, kNoSAD) ||
      (reduction_type != sub_type && HasVectorRestrictions(restrictions, kNoWideSAD))) {
    return false;
  }
  // Accept SAD idiom for vectorizable operands. Vectorized code uses the shorthand
  // idiomatic operation. Sequential code uses the original scalar expressions.
  DCHECK(r != nullptr && s != nullptr);
  if (generate_code && vector_mode_ != kVector) {  // de-idiom
    // Scalar cleanup: both "operands" collapse to the original subtraction.
    r = s = abs->InputAt(0);
  }
  if (VectorizeUse(node, acc, generate_code, sub_type, restrictions) &&
      VectorizeUse(node, r, generate_code, sub_type, restrictions) &&
      VectorizeUse(node, s, generate_code, sub_type, restrictions)) {
    if (generate_code) {
      if (vector_mode_ == kVector) {
        // Map the whole q += ABS(a - b) expression to one HVecSADAccumulate node;
        // GetOtherVL accounts for the vector length change between sub_type and
        // the (possibly wider) reduction type.
        vector_map_->Put(instruction, new (global_allocator_) HVecSADAccumulate(
            global_allocator_,
            vector_map_->Get(acc),
            vector_map_->Get(r),
            vector_map_->Get(s),
            HVecOperation::ToProperType(reduction_type, is_unsigned),
            GetOtherVL(reduction_type, sub_type, vector_length_),
            kNoDexPc));
        MaybeRecordStat(stats_, MethodCompilationStat::kLoopVectorizedIdiom);
      } else {
        // "GenerateVecOp()" must not be called more than once for each original loop body
        // instruction. As the SAD idiom processes both "current" instruction ("instruction")
        // and its ABS input in one go, we must check that for the scalar case the ABS instruction
        // has not yet been processed.
        if (vector_map_->find(abs) == vector_map_->end()) {
          GenerateVecOp(abs, vector_map_->Get(r), nullptr, reduction_type);
        }
        GenerateVecOp(instruction, vector_map_->Get(acc), vector_map_->Get(abs), reduction_type);
      }
    }
    return true;
  }
  return false;
}
| 2109 | |
// Method recognizes the following dot product idiom:
//   q += a * b for operands a, b whose type is narrower than the reduction one.
// Provided that the operands have the same type or are promoted to a wider form.
// Since this may involve a vector length change, the idiom is handled by going directly
// to a dot product node (rather than relying on combining finer-grained nodes later).
bool HLoopOptimization::VectorizeDotProdIdiom(LoopNode* node,
                                              HInstruction* instruction,
                                              bool generate_code,
                                              DataType::Type reduction_type,
                                              uint64_t restrictions) {
  // Only an int32 "q += ..." (HAdd) reduction is accepted for dot product.
  if (!instruction->IsAdd() || reduction_type != DataType::Type::kInt32) {
    return false;
  }

  // Expect the accumulator as the first input and a same-precision MUL as the second.
  HInstruction* const acc = instruction->InputAt(0);
  HInstruction* const mul = instruction->InputAt(1);
  if (!mul->IsMul() || mul->GetType() != reduction_type) {
    return false;
  }

  // Both multiplication operands must be consistently promoted from the same
  // narrower type (r, s); the narrowest of the two picks the operation type.
  HInstruction* const mul_left = mul->InputAt(0);
  HInstruction* const mul_right = mul->InputAt(1);
  HInstruction* r = mul_left;
  HInstruction* s = mul_right;
  DataType::Type op_type = GetNarrowerType(mul_left, mul_right);
  bool is_unsigned = false;

  if (!IsNarrowerOperands(mul_left, mul_right, op_type, &r, &s, &is_unsigned)) {
    return false;
  }
  op_type = HVecOperation::ToProperType(op_type, is_unsigned);

  // Deal with vector restrictions: the target must support dot product at op_type.
  if (!TrySetVectorType(op_type, &restrictions) ||
      HasVectorRestrictions(restrictions, kNoDotProd)) {
    return false;
  }

  DCHECK(r != nullptr && s != nullptr);
  // Accept dot product idiom for vectorizable operands. Vectorized code uses the shorthand
  // idiomatic operation. Sequential code uses the original scalar expressions.
  if (generate_code && vector_mode_ != kVector) {  // de-idiom
    // Scalar cleanup: use the original (widened) multiplication operands.
    r = mul_left;
    s = mul_right;
  }
  if (VectorizeUse(node, acc, generate_code, op_type, restrictions) &&
      VectorizeUse(node, r, generate_code, op_type, restrictions) &&
      VectorizeUse(node, s, generate_code, op_type, restrictions)) {
    if (generate_code) {
      if (vector_mode_ == kVector) {
        // Map the whole q += a * b expression to one HVecDotProd node; GetOtherVL
        // accounts for the vector length change between op_type and reduction_type.
        vector_map_->Put(instruction, new (global_allocator_) HVecDotProd(
            global_allocator_,
            vector_map_->Get(acc),
            vector_map_->Get(r),
            vector_map_->Get(s),
            reduction_type,
            is_unsigned,
            GetOtherVL(reduction_type, op_type, vector_length_),
            kNoDexPc));
        MaybeRecordStat(stats_, MethodCompilationStat::kLoopVectorizedIdiom);
      } else {
        // "GenerateVecOp()" must not be called more than once for each original loop body
        // instruction. As the DotProd idiom processes both "current" instruction ("instruction")
        // and its MUL input in one go, we must check that for the scalar case the MUL instruction
        // has not yet been processed.
        if (vector_map_->find(mul) == vector_map_->end()) {
          GenerateVecOp(mul, vector_map_->Get(r), vector_map_->Get(s), reduction_type);
        }
        GenerateVecOp(instruction, vector_map_->Get(acc), vector_map_->Get(mul), reduction_type);
      }
    }
    return true;
  }
  return false;
}
| 2184 | |
Aart Bik | f3e61ee | 2017-04-12 17:09:20 -0700 | [diff] [blame] | 2185 | // |
Aart Bik | 14a68b4 | 2017-06-08 14:06:58 -0700 | [diff] [blame] | 2186 | // Vectorization heuristics. |
| 2187 | // |
| 2188 | |
Aart Bik | 38a3f21 | 2017-10-20 17:02:21 -0700 | [diff] [blame] | 2189 | Alignment HLoopOptimization::ComputeAlignment(HInstruction* offset, |
| 2190 | DataType::Type type, |
| 2191 | bool is_string_char_at, |
| 2192 | uint32_t peeling) { |
| 2193 | // Combine the alignment and hidden offset that is guaranteed by |
| 2194 | // the Android runtime with a known starting index adjusted as bytes. |
| 2195 | int64_t value = 0; |
| 2196 | if (IsInt64AndGet(offset, /*out*/ &value)) { |
| 2197 | uint32_t start_offset = |
| 2198 | HiddenOffset(type, is_string_char_at) + (value + peeling) * DataType::Size(type); |
| 2199 | return Alignment(BaseAlignment(), start_offset & (BaseAlignment() - 1u)); |
| 2200 | } |
| 2201 | // Otherwise, the Android runtime guarantees at least natural alignment. |
| 2202 | return Alignment(DataType::Size(type), 0); |
| 2203 | } |
| 2204 | |
| 2205 | void HLoopOptimization::SetAlignmentStrategy(uint32_t peeling_votes[], |
| 2206 | const ArrayReference* peeling_candidate) { |
| 2207 | // Current heuristic: pick the best static loop peeling factor, if any, |
| 2208 | // or otherwise use dynamic loop peeling on suggested peeling candidate. |
| 2209 | uint32_t max_vote = 0; |
| 2210 | for (int32_t i = 0; i < 16; i++) { |
| 2211 | if (peeling_votes[i] > max_vote) { |
| 2212 | max_vote = peeling_votes[i]; |
| 2213 | vector_static_peeling_factor_ = i; |
| 2214 | } |
| 2215 | } |
| 2216 | if (max_vote == 0) { |
| 2217 | vector_dynamic_peeling_candidate_ = peeling_candidate; |
| 2218 | } |
| 2219 | } |
| 2220 | |
| 2221 | uint32_t HLoopOptimization::MaxNumberPeeled() { |
| 2222 | if (vector_dynamic_peeling_candidate_ != nullptr) { |
| 2223 | return vector_length_ - 1u; // worst-case |
| 2224 | } |
| 2225 | return vector_static_peeling_factor_; // known exactly |
| 2226 | } |
| 2227 | |
Aart Bik | 14a68b4 | 2017-06-08 14:06:58 -0700 | [diff] [blame] | 2228 | bool HLoopOptimization::IsVectorizationProfitable(int64_t trip_count) { |
Aart Bik | 38a3f21 | 2017-10-20 17:02:21 -0700 | [diff] [blame] | 2229 | // Current heuristic: non-empty body with sufficient number of iterations (if known). |
Aart Bik | 14a68b4 | 2017-06-08 14:06:58 -0700 | [diff] [blame] | 2230 | // TODO: refine by looking at e.g. operation count, alignment, etc. |
Aart Bik | 38a3f21 | 2017-10-20 17:02:21 -0700 | [diff] [blame] | 2231 | // TODO: trip count is really unsigned entity, provided the guarding test |
| 2232 | // is satisfied; deal with this more carefully later |
| 2233 | uint32_t max_peel = MaxNumberPeeled(); |
Aart Bik | 14a68b4 | 2017-06-08 14:06:58 -0700 | [diff] [blame] | 2234 | if (vector_length_ == 0) { |
| 2235 | return false; // nothing found |
Aart Bik | 38a3f21 | 2017-10-20 17:02:21 -0700 | [diff] [blame] | 2236 | } else if (trip_count < 0) { |
| 2237 | return false; // guard against non-taken/large |
| 2238 | } else if ((0 < trip_count) && (trip_count < (vector_length_ + max_peel))) { |
Aart Bik | 14a68b4 | 2017-06-08 14:06:58 -0700 | [diff] [blame] | 2239 | return false; // insufficient iterations |
| 2240 | } |
| 2241 | return true; |
| 2242 | } |
| 2243 | |
Aart Bik | 14a68b4 | 2017-06-08 14:06:58 -0700 | [diff] [blame] | 2244 | // |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 2245 | // Helpers. |
| 2246 | // |
| 2247 | |
bool HLoopOptimization::TrySetPhiInduction(HPhi* phi, bool restrict_uses) {
  // Tries to classify 'phi' as the head of a removable induction cycle,
  // collecting the cycle's instructions into iset_ on success.
  // Start with empty phi induction.
  iset_->clear();

  // Special case Phis that have equivalent in a debuggable setup. Our graph checker isn't
  // smart enough to follow strongly connected components (and it's probably not worth
  // it to make it so). See b/33775412.
  if (graph_->IsDebuggable() && phi->HasEquivalentPhi()) {
    return false;
  }

  // Lookup phi induction cycle.
  ArenaSet<HInstruction*>* set = induction_range_.LookupCycle(phi);
  if (set != nullptr) {
    for (HInstruction* i : *set) {
      // Check that, other than instructions that are no longer in the graph (removed earlier)
      // each instruction is removable and, when restrict uses are requested, other than for phi,
      // all uses are contained within the cycle.
      if (!i->IsInBlock()) {
        continue;  // removed earlier; nothing to verify or record
      } else if (!i->IsRemovable()) {
        return false;  // cycle cannot be deleted later; reject (iset_ left partial on purpose)
      } else if (i != phi && restrict_uses) {
        // Deal with regular uses: every user must itself be part of the cycle.
        for (const HUseListNode<HInstruction*>& use : i->GetUses()) {
          if (set->find(use.GetUser()) == set->end()) {
            return false;
          }
        }
      }
      iset_->insert(i);  // copy
    }
    return true;
  }
  return false;
}
| 2284 | |
bool HLoopOptimization::TrySetPhiReduction(HPhi* phi) {
  // Tries to classify 'phi' as the head of a reduction cycle (x = x op ..),
  // recording the links in reductions_ on success.
  DCHECK(iset_->empty());
  // Only unclassified phi cycles are candidates for reductions.
  if (induction_range_.IsClassified(phi)) {
    return false;
  }
  // Accept operations like x = x + .., provided that the phi and the reduction are
  // used exactly once inside the loop, and by each other.
  HInputsRef inputs = phi->GetInputs();
  if (inputs.size() == 2) {
    // Input 0 is the feed value from outside the loop; input 1 the in-loop update.
    HInstruction* reduction = inputs[1];
    if (HasReductionFormat(reduction, phi)) {
      HLoopInformation* loop_info = phi->GetBlock()->GetLoopInformation();
      uint32_t use_count = 0;
      bool single_use_inside_loop =
          // Reduction update only used by phi.
          reduction->GetUses().HasExactlyOneElement() &&
          !reduction->HasEnvironmentUses() &&
          // Reduction update is only use of phi inside the loop.
          // (IsOnlyUsedAfterLoop collects in-loop uses into iset_ as a side effect.)
          IsOnlyUsedAfterLoop(loop_info, phi, /*collect_loop_uses*/ true, &use_count) &&
          iset_->size() == 1;
      iset_->clear();  // leave the way you found it
      if (single_use_inside_loop) {
        // Link reduction back, and start recording feed value.
        reductions_->Put(reduction, phi);
        reductions_->Put(phi, phi->InputAt(0));
        return true;
      }
    }
  }
  return false;
}
| 2317 | |
bool HLoopOptimization::TrySetSimpleLoopHeader(HBasicBlock* block, /*out*/ HPhi** main_phi) {
  // Tests whether 'block' is a simple loop header (reductions plus exactly one
  // main induction phi, followed by SuspendCheck/Condition/If); on success the
  // control instructions join iset_ and the main phi is returned via 'main_phi'.
  // Start with empty phi induction and reductions.
  iset_->clear();
  reductions_->clear();

  // Scan the phis to find the following (the induction structure has already
  // been optimized, so we don't need to worry about trivial cases):
  // (1) optional reductions in loop,
  // (2) the main induction, used in loop control.
  HPhi* phi = nullptr;
  for (HInstructionIterator it(block->GetPhis()); !it.Done(); it.Advance()) {
    if (TrySetPhiReduction(it.Current()->AsPhi())) {
      continue;
    } else if (phi == nullptr) {
      // Found the first candidate for main induction.
      phi = it.Current()->AsPhi();
    } else {
      // A second non-reduction phi means this is not a simple loop header.
      return false;
    }
  }

  // Then test for a typical loopheader:
  //   s:  SuspendCheck
  //   c:  Condition(phi, bound)
  //   i:  If(c)
  if (phi != nullptr && TrySetPhiInduction(phi, /*restrict_uses*/ false)) {
    HInstruction* s = block->GetFirstInstruction();
    if (s != nullptr && s->IsSuspendCheck()) {
      HInstruction* c = s->GetNext();
      if (c != nullptr &&
          c->IsCondition() &&
          c->GetUses().HasExactlyOneElement() &&  // only used for termination
          !c->HasEnvironmentUses()) {  // unlikely, but not impossible
        HInstruction* i = c->GetNext();
        if (i != nullptr && i->IsIf() && i->InputAt(0) == c) {
          // Record the loop-control instructions so they are treated as removable.
          iset_->insert(c);
          iset_->insert(s);
          *main_phi = phi;
          return true;
        }
      }
    }
  }
  return false;
}
| 2363 | |
| 2364 | bool HLoopOptimization::IsEmptyBody(HBasicBlock* block) { |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 2365 | if (!block->GetPhis().IsEmpty()) { |
| 2366 | return false; |
| 2367 | } |
| 2368 | for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) { |
| 2369 | HInstruction* instruction = it.Current(); |
| 2370 | if (!instruction->IsGoto() && iset_->find(instruction) == iset_->end()) { |
| 2371 | return false; |
Aart Bik | cc42be0 | 2016-10-20 16:14:16 -0700 | [diff] [blame] | 2372 | } |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 2373 | } |
| 2374 | return true; |
| 2375 | } |
| 2376 | |
| 2377 | bool HLoopOptimization::IsUsedOutsideLoop(HLoopInformation* loop_info, |
| 2378 | HInstruction* instruction) { |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 2379 | // Deal with regular uses. |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 2380 | for (const HUseListNode<HInstruction*>& use : instruction->GetUses()) { |
| 2381 | if (use.GetUser()->GetBlock()->GetLoopInformation() != loop_info) { |
| 2382 | return true; |
| 2383 | } |
Aart Bik | cc42be0 | 2016-10-20 16:14:16 -0700 | [diff] [blame] | 2384 | } |
| 2385 | return false; |
| 2386 | } |
| 2387 | |
Aart Bik | 482095d | 2016-10-10 15:39:10 -0700 | [diff] [blame] | 2388 | bool HLoopOptimization::IsOnlyUsedAfterLoop(HLoopInformation* loop_info, |
Aart Bik | 8c4a854 | 2016-10-06 11:36:57 -0700 | [diff] [blame] | 2389 | HInstruction* instruction, |
Aart Bik | 6b69e0a | 2017-01-11 10:20:43 -0800 | [diff] [blame] | 2390 | bool collect_loop_uses, |
Aart Bik | 38a3f21 | 2017-10-20 17:02:21 -0700 | [diff] [blame] | 2391 | /*out*/ uint32_t* use_count) { |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 2392 | // Deal with regular uses. |
Aart Bik | 8c4a854 | 2016-10-06 11:36:57 -0700 | [diff] [blame] | 2393 | for (const HUseListNode<HInstruction*>& use : instruction->GetUses()) { |
| 2394 | HInstruction* user = use.GetUser(); |
| 2395 | if (iset_->find(user) == iset_->end()) { // not excluded? |
| 2396 | HLoopInformation* other_loop_info = user->GetBlock()->GetLoopInformation(); |
Aart Bik | 482095d | 2016-10-10 15:39:10 -0700 | [diff] [blame] | 2397 | if (other_loop_info != nullptr && other_loop_info->IsIn(*loop_info)) { |
Aart Bik | 6b69e0a | 2017-01-11 10:20:43 -0800 | [diff] [blame] | 2398 | // If collect_loop_uses is set, simply keep adding those uses to the set. |
| 2399 | // Otherwise, reject uses inside the loop that were not already in the set. |
| 2400 | if (collect_loop_uses) { |
| 2401 | iset_->insert(user); |
| 2402 | continue; |
| 2403 | } |
Aart Bik | 8c4a854 | 2016-10-06 11:36:57 -0700 | [diff] [blame] | 2404 | return false; |
| 2405 | } |
| 2406 | ++*use_count; |
| 2407 | } |
| 2408 | } |
| 2409 | return true; |
| 2410 | } |
| 2411 | |
Nicolas Geoffray | 1a0a519 | 2017-06-22 11:56:01 +0100 | [diff] [blame] | 2412 | bool HLoopOptimization::TryReplaceWithLastValue(HLoopInformation* loop_info, |
| 2413 | HInstruction* instruction, |
| 2414 | HBasicBlock* block) { |
| 2415 | // Try to replace outside uses with the last value. |
Aart Bik | 807868e | 2016-11-03 17:51:43 -0700 | [diff] [blame] | 2416 | if (induction_range_.CanGenerateLastValue(instruction)) { |
Aart Bik | 6b69e0a | 2017-01-11 10:20:43 -0800 | [diff] [blame] | 2417 | HInstruction* replacement = induction_range_.GenerateLastValue(instruction, graph_, block); |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 2418 | // Deal with regular uses. |
Aart Bik | 6b69e0a | 2017-01-11 10:20:43 -0800 | [diff] [blame] | 2419 | const HUseList<HInstruction*>& uses = instruction->GetUses(); |
| 2420 | for (auto it = uses.begin(), end = uses.end(); it != end;) { |
| 2421 | HInstruction* user = it->GetUser(); |
| 2422 | size_t index = it->GetIndex(); |
| 2423 | ++it; // increment before replacing |
| 2424 | if (iset_->find(user) == iset_->end()) { // not excluded? |
Nicolas Geoffray | 1a0a519 | 2017-06-22 11:56:01 +0100 | [diff] [blame] | 2425 | if (kIsDebugBuild) { |
| 2426 | // We have checked earlier in 'IsOnlyUsedAfterLoop' that the use is after the loop. |
| 2427 | HLoopInformation* other_loop_info = user->GetBlock()->GetLoopInformation(); |
| 2428 | CHECK(other_loop_info == nullptr || !other_loop_info->IsIn(*loop_info)); |
| 2429 | } |
Aart Bik | 6b69e0a | 2017-01-11 10:20:43 -0800 | [diff] [blame] | 2430 | user->ReplaceInput(replacement, index); |
| 2431 | induction_range_.Replace(user, instruction, replacement); // update induction |
| 2432 | } |
| 2433 | } |
Aart Bik | b29f684 | 2017-07-28 15:58:41 -0700 | [diff] [blame] | 2434 | // Deal with environment uses. |
Aart Bik | 6b69e0a | 2017-01-11 10:20:43 -0800 | [diff] [blame] | 2435 | const HUseList<HEnvironment*>& env_uses = instruction->GetEnvUses(); |
| 2436 | for (auto it = env_uses.begin(), end = env_uses.end(); it != end;) { |
| 2437 | HEnvironment* user = it->GetUser(); |
| 2438 | size_t index = it->GetIndex(); |
| 2439 | ++it; // increment before replacing |
| 2440 | if (iset_->find(user->GetHolder()) == iset_->end()) { // not excluded? |
Nicolas Geoffray | 1a0a519 | 2017-06-22 11:56:01 +0100 | [diff] [blame] | 2441 | // Only update environment uses after the loop. |
Aart Bik | 14a68b4 | 2017-06-08 14:06:58 -0700 | [diff] [blame] | 2442 | HLoopInformation* other_loop_info = user->GetHolder()->GetBlock()->GetLoopInformation(); |
Nicolas Geoffray | 1a0a519 | 2017-06-22 11:56:01 +0100 | [diff] [blame] | 2443 | if (other_loop_info == nullptr || !other_loop_info->IsIn(*loop_info)) { |
| 2444 | user->RemoveAsUserOfInput(index); |
| 2445 | user->SetRawEnvAt(index, replacement); |
| 2446 | replacement->AddEnvUseAt(user, index); |
| 2447 | } |
Aart Bik | 6b69e0a | 2017-01-11 10:20:43 -0800 | [diff] [blame] | 2448 | } |
| 2449 | } |
Aart Bik | 807868e | 2016-11-03 17:51:43 -0700 | [diff] [blame] | 2450 | return true; |
Aart Bik | 8c4a854 | 2016-10-06 11:36:57 -0700 | [diff] [blame] | 2451 | } |
Aart Bik | 807868e | 2016-11-03 17:51:43 -0700 | [diff] [blame] | 2452 | return false; |
Aart Bik | 8c4a854 | 2016-10-06 11:36:57 -0700 | [diff] [blame] | 2453 | } |
| 2454 | |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 2455 | bool HLoopOptimization::TryAssignLastValue(HLoopInformation* loop_info, |
| 2456 | HInstruction* instruction, |
| 2457 | HBasicBlock* block, |
| 2458 | bool collect_loop_uses) { |
| 2459 | // Assigning the last value is always successful if there are no uses. |
| 2460 | // Otherwise, it succeeds in a no early-exit loop by generating the |
| 2461 | // proper last value assignment. |
Aart Bik | 38a3f21 | 2017-10-20 17:02:21 -0700 | [diff] [blame] | 2462 | uint32_t use_count = 0; |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 2463 | return IsOnlyUsedAfterLoop(loop_info, instruction, collect_loop_uses, &use_count) && |
| 2464 | (use_count == 0 || |
Nicolas Geoffray | 1a0a519 | 2017-06-22 11:56:01 +0100 | [diff] [blame] | 2465 | (!IsEarlyExit(loop_info) && TryReplaceWithLastValue(loop_info, instruction, block))); |
Aart Bik | f8f5a16 | 2017-02-06 15:35:29 -0800 | [diff] [blame] | 2466 | } |
| 2467 | |
Aart Bik | 6b69e0a | 2017-01-11 10:20:43 -0800 | [diff] [blame] | 2468 | void HLoopOptimization::RemoveDeadInstructions(const HInstructionList& list) { |
| 2469 | for (HBackwardInstructionIterator i(list); !i.Done(); i.Advance()) { |
| 2470 | HInstruction* instruction = i.Current(); |
| 2471 | if (instruction->IsDeadAndRemovable()) { |
| 2472 | simplified_ = true; |
| 2473 | instruction->GetBlock()->RemoveInstructionOrPhi(instruction); |
| 2474 | } |
| 2475 | } |
| 2476 | } |
| 2477 | |
Aart Bik | 14a68b4 | 2017-06-08 14:06:58 -0700 | [diff] [blame] | 2478 | bool HLoopOptimization::CanRemoveCycle() { |
| 2479 | for (HInstruction* i : *iset_) { |
| 2480 | // We can never remove instructions that have environment |
| 2481 | // uses when we compile 'debuggable'. |
| 2482 | if (i->HasEnvironmentUses() && graph_->IsDebuggable()) { |
| 2483 | return false; |
| 2484 | } |
| 2485 | // A deoptimization should never have an environment input removed. |
| 2486 | for (const HUseListNode<HEnvironment*>& use : i->GetEnvUses()) { |
| 2487 | if (use.GetUser()->GetHolder()->IsDeoptimize()) { |
| 2488 | return false; |
| 2489 | } |
| 2490 | } |
| 2491 | } |
| 2492 | return true; |
| 2493 | } |
| 2494 | |
Aart Bik | 281c681 | 2016-08-26 11:31:48 -0700 | [diff] [blame] | 2495 | } // namespace art |