xueliang.zhong | c239a2b | 2017-04-27 15:31:37 +0100 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2017 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | #include "load_store_analysis.h" |
| 18 | |
Alex Light | 3a73ffb | 2021-01-25 14:11:05 +0000 | [diff] [blame] | 19 | #include "base/scoped_arena_allocator.h" |
| 20 | #include "optimizing/escape.h" |
| 21 | |
VladimĂr Marko | 434d968 | 2022-11-04 14:04:17 +0000 | [diff] [blame] | 22 | namespace art HIDDEN { |
xueliang.zhong | c239a2b | 2017-04-27 15:31:37 +0100 | [diff] [blame] | 23 | |
// A cap for the number of heap locations to prevent pathological time/space consumption.
// The number of heap locations for most of the methods stays below this threshold.
// Methods exceeding it make LoadStoreAnalysis::Run() bail out (see below).
constexpr size_t kMaxNumberOfHeapLocations = 32;
| 27 | |
// Test whether the two inclusive integer ranges [l1,h1] and [l2,h2] share
// at least one point.
// l1|------|h1
//  l2|------|h2
static bool CanIntegerRangesOverlap(int64_t l1, int64_t h1, int64_t l2, int64_t h2) {
  // The intersection of two inclusive ranges is [max(lows), min(highs)];
  // it is non-empty exactly when its low end does not pass its high end.
  const int64_t intersection_low = std::max(l1, l2);
  const int64_t intersection_high = std::min(h1, h2);
  return intersection_low <= intersection_high;
}
xueliang.zhong | 016c0f1 | 2017-05-12 18:16:31 +0100 | [diff] [blame] | 35 | |
xueliang.zhong | b50b16a | 2017-09-19 17:43:29 +0100 | [diff] [blame] | 36 | static bool CanBinaryOpAndIndexAlias(const HBinaryOperation* idx1, |
| 37 | const size_t vector_length1, |
| 38 | const HInstruction* idx2, |
| 39 | const size_t vector_length2) { |
| 40 | if (!IsAddOrSub(idx1)) { |
xueliang.zhong | 016c0f1 | 2017-05-12 18:16:31 +0100 | [diff] [blame] | 41 | // We currently only support Add and Sub operations. |
| 42 | return true; |
| 43 | } |
xueliang.zhong | b50b16a | 2017-09-19 17:43:29 +0100 | [diff] [blame] | 44 | if (idx1->AsBinaryOperation()->GetLeastConstantLeft() != idx2) { |
| 45 | // Cannot analyze [i+CONST1] and [j]. |
| 46 | return true; |
| 47 | } |
| 48 | if (!idx1->GetConstantRight()->IsIntConstant()) { |
xueliang.zhong | 016c0f1 | 2017-05-12 18:16:31 +0100 | [diff] [blame] | 49 | return true; |
| 50 | } |
| 51 | |
xueliang.zhong | b50b16a | 2017-09-19 17:43:29 +0100 | [diff] [blame] | 52 | // Since 'i' are the same in [i+CONST] and [i], |
| 53 | // further compare [CONST] and [0]. |
| 54 | int64_t l1 = idx1->IsAdd() ? |
| 55 | idx1->GetConstantRight()->AsIntConstant()->GetValue() : |
| 56 | -idx1->GetConstantRight()->AsIntConstant()->GetValue(); |
| 57 | int64_t l2 = 0; |
| 58 | int64_t h1 = l1 + (vector_length1 - 1); |
| 59 | int64_t h2 = l2 + (vector_length2 - 1); |
| 60 | return CanIntegerRangesOverlap(l1, h1, l2, h2); |
xueliang.zhong | 016c0f1 | 2017-05-12 18:16:31 +0100 | [diff] [blame] | 61 | } |
| 62 | |
xueliang.zhong | b50b16a | 2017-09-19 17:43:29 +0100 | [diff] [blame] | 63 | static bool CanBinaryOpsAlias(const HBinaryOperation* idx1, |
| 64 | const size_t vector_length1, |
| 65 | const HBinaryOperation* idx2, |
| 66 | const size_t vector_length2) { |
| 67 | if (!IsAddOrSub(idx1) || !IsAddOrSub(idx2)) { |
| 68 | // We currently only support Add and Sub operations. |
| 69 | return true; |
| 70 | } |
| 71 | if (idx1->AsBinaryOperation()->GetLeastConstantLeft() != |
| 72 | idx2->AsBinaryOperation()->GetLeastConstantLeft()) { |
| 73 | // Cannot analyze [i+CONST1] and [j+CONST2]. |
| 74 | return true; |
| 75 | } |
| 76 | if (!idx1->GetConstantRight()->IsIntConstant() || |
| 77 | !idx2->GetConstantRight()->IsIntConstant()) { |
xueliang.zhong | 016c0f1 | 2017-05-12 18:16:31 +0100 | [diff] [blame] | 78 | return true; |
| 79 | } |
| 80 | |
xueliang.zhong | b50b16a | 2017-09-19 17:43:29 +0100 | [diff] [blame] | 81 | // Since 'i' are the same in [i+CONST1] and [i+CONST2], |
| 82 | // further compare [CONST1] and [CONST2]. |
| 83 | int64_t l1 = idx1->IsAdd() ? |
| 84 | idx1->GetConstantRight()->AsIntConstant()->GetValue() : |
| 85 | -idx1->GetConstantRight()->AsIntConstant()->GetValue(); |
| 86 | int64_t l2 = idx2->IsAdd() ? |
| 87 | idx2->GetConstantRight()->AsIntConstant()->GetValue() : |
| 88 | -idx2->GetConstantRight()->AsIntConstant()->GetValue(); |
| 89 | int64_t h1 = l1 + (vector_length1 - 1); |
| 90 | int64_t h2 = l2 + (vector_length2 - 1); |
| 91 | return CanIntegerRangesOverlap(l1, h1, l2, h2); |
xueliang.zhong | 016c0f1 | 2017-05-12 18:16:31 +0100 | [diff] [blame] | 92 | } |
| 93 | |
// Make sure we mark any writes/potential writes to heap-locations within partially
// escaped values as escaping.
void ReferenceInfo::PrunePartialEscapeWrites() {
  DCHECK(subgraph_ != nullptr);
  if (!subgraph_->IsValid()) {
    // All paths escape.
    return;
  }
  HGraph* graph = reference_->GetBlock()->GetGraph();
  // Blocks (indexed by block id) newly discovered to need exclusion from the
  // no-escape subgraph because a write in them may follow a partial escape.
  ArenaBitVector additional_exclusions(
      allocator_, graph->GetBlocks().size(), false, kArenaAllocLSA);
  for (const HUseListNode<HInstruction*>& use : reference_->GetUses()) {
    const HInstruction* user = use.GetUser();
    // Exclude the user's block when all of the following hold:
    //  - it is not already marked for exclusion;
    //  - it is currently part of the no-escape subgraph;
    //  - the user is a field/array store (resolved or unresolved);
    //  - the stored-into object (input 0) is this reference; and
    //  - some block already marked unreachable (escaped-along) has a path to
    //    the user's block, i.e. the object may have escaped before the write.
    if (!additional_exclusions.IsBitSet(user->GetBlock()->GetBlockId()) &&
        subgraph_->ContainsBlock(user->GetBlock()) &&
        (user->IsUnresolvedInstanceFieldSet() || user->IsUnresolvedStaticFieldSet() ||
         user->IsInstanceFieldSet() || user->IsStaticFieldSet() || user->IsArraySet()) &&
        (reference_ == user->InputAt(0)) &&
        std::any_of(subgraph_->UnreachableBlocks().begin(),
                    subgraph_->UnreachableBlocks().end(),
                    [&](const HBasicBlock* excluded) -> bool {
                      return reference_->GetBlock()->GetGraph()->PathBetween(excluded,
                                                                             user->GetBlock());
                    })) {
      // This object had memory written to it somewhere, if it escaped along
      // some paths prior to the current block this write also counts as an
      // escape.
      additional_exclusions.SetBit(user->GetBlock()->GetBlockId());
    }
  }
  // Apply the collected exclusions after iteration so the subgraph is not
  // mutated while its blocks are being queried above.
  if (UNLIKELY(additional_exclusions.IsAnyBitSet())) {
    for (uint32_t exc : additional_exclusions.Indexes()) {
      subgraph_->RemoveBlock(graph->GetBlocks()[exc]);
    }
  }
}
| 130 | |
| 131 | bool HeapLocationCollector::InstructionEligibleForLSERemoval(HInstruction* inst) const { |
| 132 | if (inst->IsNewInstance()) { |
| 133 | return !inst->AsNewInstance()->NeedsChecks(); |
| 134 | } else if (inst->IsNewArray()) { |
| 135 | HInstruction* array_length = inst->AsNewArray()->GetLength(); |
| 136 | bool known_array_length = |
| 137 | array_length->IsIntConstant() && array_length->AsIntConstant()->GetValue() >= 0; |
| 138 | return known_array_length && |
| 139 | std::all_of(inst->GetUses().cbegin(), |
| 140 | inst->GetUses().cend(), |
| 141 | [&](const HUseListNode<HInstruction*>& user) { |
| 142 | if (user.GetUser()->IsArrayGet() || user.GetUser()->IsArraySet()) { |
| 143 | return user.GetUser()->InputAt(1)->IsIntConstant(); |
| 144 | } |
| 145 | return true; |
| 146 | }); |
| 147 | } else { |
| 148 | return false; |
| 149 | } |
| 150 | } |
| 151 | |
// Record every escape point of `reference_` (via HandleEscape), transitively
// following escapes through Phi/Select instructions, which LSE cannot track
// heap-locations through.
void ReferenceInfo::CollectPartialEscapes(HGraph* graph) {
  ScopedArenaAllocator saa(graph->GetArenaStack());
  // One bit per instruction id: marks Phi/Select instructions already queued
  // so the worklist below never enqueues the same instruction twice.
  ArenaBitVector seen_instructions(&saa, graph->GetCurrentInstructionId(), false, kArenaAllocLSA);
  // Get regular escapes.
  ScopedArenaVector<HInstruction*> additional_escape_vectors(saa.Adapter(kArenaAllocLSA));
  LambdaEscapeVisitor scan_instructions([&](HInstruction* escape) -> bool {
    HandleEscape(escape);
    // LSE can't track heap-locations through Phi and Select instructions so we
    // need to assume all escapes from these are escapes for the base reference.
    if ((escape->IsPhi() || escape->IsSelect()) && !seen_instructions.IsBitSet(escape->GetId())) {
      seen_instructions.SetBit(escape->GetId());
      additional_escape_vectors.push_back(escape);
    }
    // Returning true keeps the visitor scanning further escapes.
    return true;
  });
  // Worklist seeded with the base reference; the visitor lambda above pushes
  // any Phi/Select it flows into, so those get scanned for escapes too.
  additional_escape_vectors.push_back(reference_);
  while (!additional_escape_vectors.empty()) {
    HInstruction* ref = additional_escape_vectors.back();
    additional_escape_vectors.pop_back();
    DCHECK(ref == reference_ || ref->IsPhi() || ref->IsSelect()) << *ref;
    VisitEscapes(ref, scan_instructions);
  }

  // Mark irreducible loop headers as escaping since they cannot be tracked through.
  for (HBasicBlock* blk : graph->GetActiveBlocks()) {
    if (blk->IsLoopHeader() && blk->GetLoopInformation()->IsIrreducible()) {
      HandleEscape(blk);
    }
  }
}
| 182 | |
Alex Light | 86fe9b8 | 2020-11-16 16:54:01 +0000 | [diff] [blame] | 183 | void HeapLocationCollector::DumpReferenceStats(OptimizingCompilerStats* stats) { |
| 184 | if (stats == nullptr) { |
| 185 | return; |
| 186 | } |
| 187 | std::vector<bool> seen_instructions(GetGraph()->GetCurrentInstructionId(), false); |
| 188 | for (auto hl : heap_locations_) { |
| 189 | auto ri = hl->GetReferenceInfo(); |
| 190 | if (ri == nullptr || seen_instructions[ri->GetReference()->GetId()]) { |
| 191 | continue; |
| 192 | } |
| 193 | auto instruction = ri->GetReference(); |
| 194 | seen_instructions[instruction->GetId()] = true; |
| 195 | if (ri->IsSingletonAndRemovable()) { |
| 196 | if (InstructionEligibleForLSERemoval(instruction)) { |
| 197 | MaybeRecordStat(stats, MethodCompilationStat::kFullLSEPossible); |
| 198 | } |
| 199 | } |
| 200 | // TODO This is an estimate of the number of allocations we will be able |
| 201 | // to (partially) remove. As additional work is done this can be refined. |
| 202 | if (ri->IsPartialSingleton() && instruction->IsNewInstance() && |
| 203 | ri->GetNoEscapeSubgraph()->ContainsBlock(instruction->GetBlock()) && |
| 204 | !ri->GetNoEscapeSubgraph()->GetExcludedCohorts().empty() && |
| 205 | InstructionEligibleForLSERemoval(instruction)) { |
| 206 | MaybeRecordStat(stats, MethodCompilationStat::kPartialLSEPossible); |
| 207 | } |
| 208 | } |
| 209 | } |
| 210 | |
xueliang.zhong | b50b16a | 2017-09-19 17:43:29 +0100 | [diff] [blame] | 211 | bool HeapLocationCollector::CanArrayElementsAlias(const HInstruction* idx1, |
| 212 | const size_t vector_length1, |
| 213 | const HInstruction* idx2, |
| 214 | const size_t vector_length2) const { |
xueliang.zhong | 016c0f1 | 2017-05-12 18:16:31 +0100 | [diff] [blame] | 215 | DCHECK(idx1 != nullptr); |
| 216 | DCHECK(idx2 != nullptr); |
xueliang.zhong | b50b16a | 2017-09-19 17:43:29 +0100 | [diff] [blame] | 217 | DCHECK_GE(vector_length1, HeapLocation::kScalar); |
| 218 | DCHECK_GE(vector_length2, HeapLocation::kScalar); |
xueliang.zhong | 016c0f1 | 2017-05-12 18:16:31 +0100 | [diff] [blame] | 219 | |
xueliang.zhong | b50b16a | 2017-09-19 17:43:29 +0100 | [diff] [blame] | 220 | // [i] and [i]. |
xueliang.zhong | 016c0f1 | 2017-05-12 18:16:31 +0100 | [diff] [blame] | 221 | if (idx1 == idx2) { |
xueliang.zhong | 016c0f1 | 2017-05-12 18:16:31 +0100 | [diff] [blame] | 222 | return true; |
| 223 | } |
xueliang.zhong | b50b16a | 2017-09-19 17:43:29 +0100 | [diff] [blame] | 224 | |
| 225 | // [CONST1] and [CONST2]. |
xueliang.zhong | 016c0f1 | 2017-05-12 18:16:31 +0100 | [diff] [blame] | 226 | if (idx1->IsIntConstant() && idx2->IsIntConstant()) { |
xueliang.zhong | b50b16a | 2017-09-19 17:43:29 +0100 | [diff] [blame] | 227 | int64_t l1 = idx1->AsIntConstant()->GetValue(); |
| 228 | int64_t l2 = idx2->AsIntConstant()->GetValue(); |
| 229 | // To avoid any overflow in following CONST+vector_length calculation, |
| 230 | // use int64_t instead of int32_t. |
| 231 | int64_t h1 = l1 + (vector_length1 - 1); |
| 232 | int64_t h2 = l2 + (vector_length2 - 1); |
| 233 | return CanIntegerRangesOverlap(l1, h1, l2, h2); |
xueliang.zhong | 016c0f1 | 2017-05-12 18:16:31 +0100 | [diff] [blame] | 234 | } |
| 235 | |
xueliang.zhong | b50b16a | 2017-09-19 17:43:29 +0100 | [diff] [blame] | 236 | // [i+CONST] and [i]. |
| 237 | if (idx1->IsBinaryOperation() && |
| 238 | idx1->AsBinaryOperation()->GetConstantRight() != nullptr && |
| 239 | idx1->AsBinaryOperation()->GetLeastConstantLeft() == idx2) { |
| 240 | return CanBinaryOpAndIndexAlias(idx1->AsBinaryOperation(), |
| 241 | vector_length1, |
| 242 | idx2, |
| 243 | vector_length2); |
xueliang.zhong | 016c0f1 | 2017-05-12 18:16:31 +0100 | [diff] [blame] | 244 | } |
| 245 | |
xueliang.zhong | b50b16a | 2017-09-19 17:43:29 +0100 | [diff] [blame] | 246 | // [i] and [i+CONST]. |
| 247 | if (idx2->IsBinaryOperation() && |
| 248 | idx2->AsBinaryOperation()->GetConstantRight() != nullptr && |
| 249 | idx2->AsBinaryOperation()->GetLeastConstantLeft() == idx1) { |
| 250 | return CanBinaryOpAndIndexAlias(idx2->AsBinaryOperation(), |
| 251 | vector_length2, |
| 252 | idx1, |
| 253 | vector_length1); |
| 254 | } |
| 255 | |
| 256 | // [i+CONST1] and [i+CONST2]. |
| 257 | if (idx1->IsBinaryOperation() && |
| 258 | idx1->AsBinaryOperation()->GetConstantRight() != nullptr && |
| 259 | idx2->IsBinaryOperation() && |
| 260 | idx2->AsBinaryOperation()->GetConstantRight() != nullptr) { |
| 261 | return CanBinaryOpsAlias(idx1->AsBinaryOperation(), |
| 262 | vector_length1, |
| 263 | idx2->AsBinaryOperation(), |
| 264 | vector_length2); |
xueliang.zhong | 016c0f1 | 2017-05-12 18:16:31 +0100 | [diff] [blame] | 265 | } |
| 266 | |
| 267 | // By default, MAY alias. |
| 268 | return true; |
| 269 | } |
| 270 | |
Aart Bik | 2477320 | 2018-04-26 10:28:51 -0700 | [diff] [blame] | 271 | bool LoadStoreAnalysis::Run() { |
xueliang.zhong | c239a2b | 2017-04-27 15:31:37 +0100 | [diff] [blame] | 272 | for (HBasicBlock* block : graph_->GetReversePostOrder()) { |
| 273 | heap_location_collector_.VisitBasicBlock(block); |
| 274 | } |
| 275 | |
| 276 | if (heap_location_collector_.GetNumberOfHeapLocations() > kMaxNumberOfHeapLocations) { |
| 277 | // Bail out if there are too many heap locations to deal with. |
| 278 | heap_location_collector_.CleanUp(); |
Aart Bik | 2477320 | 2018-04-26 10:28:51 -0700 | [diff] [blame] | 279 | return false; |
xueliang.zhong | c239a2b | 2017-04-27 15:31:37 +0100 | [diff] [blame] | 280 | } |
| 281 | if (!heap_location_collector_.HasHeapStores()) { |
| 282 | // Without heap stores, this pass would act mostly as GVN on heap accesses. |
| 283 | heap_location_collector_.CleanUp(); |
Aart Bik | 2477320 | 2018-04-26 10:28:51 -0700 | [diff] [blame] | 284 | return false; |
xueliang.zhong | c239a2b | 2017-04-27 15:31:37 +0100 | [diff] [blame] | 285 | } |
xueliang.zhong | c239a2b | 2017-04-27 15:31:37 +0100 | [diff] [blame] | 286 | heap_location_collector_.BuildAliasingMatrix(); |
Alex Light | 86fe9b8 | 2020-11-16 16:54:01 +0000 | [diff] [blame] | 287 | heap_location_collector_.DumpReferenceStats(stats_); |
Aart Bik | 2477320 | 2018-04-26 10:28:51 -0700 | [diff] [blame] | 288 | return true; |
xueliang.zhong | c239a2b | 2017-04-27 15:31:37 +0100 | [diff] [blame] | 289 | } |
| 290 | |
| 291 | } // namespace art |