/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "load_store_analysis.h"

#include "base/scoped_arena_allocator.h"
#include "optimizing/escape.h"

namespace art HIDDEN {

// A cap for the number of heap locations to prevent pathological time/space consumption.
// The number of heap locations for most of the methods stays below this threshold.
constexpr size_t kMaxNumberOfHeapLocations = 32;

// Test if two integer ranges [l1,h1] and [l2,h2] overlap.
// Note that the ranges are inclusive on both ends.
//       l1|------|h1
//  l2|------|h2
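// For example, [2,5] and [4,9] overlap since max(2,4) = 4 <= min(5,9) = 5,
// while [0,1] and [3,7] do not since max(0,3) = 3 > min(1,7) = 1.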
static bool CanIntegerRangesOverlap(int64_t l1, int64_t h1, int64_t l2, int64_t h2) {
  return std::max(l1, l2) <= std::min(h1, h2);
}

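// Test if the accesses [idx1, idx1 + vector_length1) and [idx2, idx2 + vector_length2)
// may alias, where idx1 is expected to be of the form 'idx2 + CONST' or 'idx2 - CONST'.
// For example, [i+1] with vector length 2 covers offsets [1,2] and [i] with vector
// length 4 covers offsets [0,3], so the two accesses may alias.
// Conservatively returns true whenever the index cannot be analyzed.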
static bool CanBinaryOpAndIndexAlias(const HBinaryOperation* idx1,
                                     const size_t vector_length1,
                                     const HInstruction* idx2,
                                     const size_t vector_length2) {
  if (!IsAddOrSub(idx1)) {
    // We currently only support Add and Sub operations.
    return true;
  }
  if (idx1->AsBinaryOperation()->GetLeastConstantLeft() != idx2) {
    // Cannot analyze [i+CONST1] and [j].
    return true;
  }
  if (!idx1->GetConstantRight()->IsIntConstant()) {
    return true;
  }

  // Since 'i' is the same in [i+CONST] and [i],
  // further compare [CONST] and [0].
  int64_t l1 = idx1->IsAdd() ?
      idx1->GetConstantRight()->AsIntConstant()->GetValue() :
      -idx1->GetConstantRight()->AsIntConstant()->GetValue();
  int64_t l2 = 0;
  int64_t h1 = l1 + (vector_length1 - 1);
  int64_t h2 = l2 + (vector_length2 - 1);
  return CanIntegerRangesOverlap(l1, h1, l2, h2);
}

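// Test if the accesses [idx1, idx1 + vector_length1) and [idx2, idx2 + vector_length2)
// may alias, where both indexes are expected to be of the form 'i + CONST1' and
// 'i + CONST2' (or their Sub variants) over the same base index 'i'.
// Conservatively returns true whenever the indexes cannot be analyzed.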
static bool CanBinaryOpsAlias(const HBinaryOperation* idx1,
                              const size_t vector_length1,
                              const HBinaryOperation* idx2,
                              const size_t vector_length2) {
  if (!IsAddOrSub(idx1) || !IsAddOrSub(idx2)) {
    // We currently only support Add and Sub operations.
    return true;
  }
  if (idx1->AsBinaryOperation()->GetLeastConstantLeft() !=
      idx2->AsBinaryOperation()->GetLeastConstantLeft()) {
    // Cannot analyze [i+CONST1] and [j+CONST2].
    return true;
  }
  if (!idx1->GetConstantRight()->IsIntConstant() ||
      !idx2->GetConstantRight()->IsIntConstant()) {
    return true;
  }

  // Since 'i' is the same in [i+CONST1] and [i+CONST2],
  // further compare [CONST1] and [CONST2].
  int64_t l1 = idx1->IsAdd() ?
      idx1->GetConstantRight()->AsIntConstant()->GetValue() :
      -idx1->GetConstantRight()->AsIntConstant()->GetValue();
  int64_t l2 = idx2->IsAdd() ?
      idx2->GetConstantRight()->AsIntConstant()->GetValue() :
      -idx2->GetConstantRight()->AsIntConstant()->GetValue();
  int64_t h1 = l1 + (vector_length1 - 1);
  int64_t h2 = l2 + (vector_length2 - 1);
  return CanIntegerRangesOverlap(l1, h1, l2, h2);
}

// Make sure we mark any writes/potential writes to heap-locations within partially
// escaped values as escaping.
void ReferenceInfo::PrunePartialEscapeWrites() {
  DCHECK(subgraph_ != nullptr);
  if (!subgraph_->IsValid()) {
    // All paths escape.
    return;
  }
  HGraph* graph = reference_->GetBlock()->GetGraph();
  ArenaBitVector additional_exclusions(
      allocator_, graph->GetBlocks().size(), false, kArenaAllocLSA);
  for (const HUseListNode<HInstruction*>& use : reference_->GetUses()) {
    const HInstruction* user = use.GetUser();
    if (!additional_exclusions.IsBitSet(user->GetBlock()->GetBlockId()) &&
        subgraph_->ContainsBlock(user->GetBlock()) &&
        (user->IsUnresolvedInstanceFieldSet() || user->IsUnresolvedStaticFieldSet() ||
         user->IsInstanceFieldSet() || user->IsStaticFieldSet() || user->IsArraySet()) &&
        (reference_ == user->InputAt(0)) &&
        std::any_of(subgraph_->UnreachableBlocks().begin(),
                    subgraph_->UnreachableBlocks().end(),
                    [&](const HBasicBlock* excluded) -> bool {
                      return reference_->GetBlock()->GetGraph()->PathBetween(excluded,
                                                                             user->GetBlock());
                    })) {
      // This object had memory written to it somewhere; if it escaped along
      // some paths prior to the current block, this write also counts as an
      // escape.
      additional_exclusions.SetBit(user->GetBlock()->GetBlockId());
    }
  }
  if (UNLIKELY(additional_exclusions.IsAnyBitSet())) {
    for (uint32_t exc : additional_exclusions.Indexes()) {
      subgraph_->RemoveBlock(graph->GetBlocks()[exc]);
    }
  }
}

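// Check whether the given allocation is simple enough for LSE to remove entirely:
// either a NewInstance that needs no extra checks, or a NewArray whose length is a
// known non-negative constant and whose ArrayGet/ArraySet uses all have constant
// indexes.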
bool HeapLocationCollector::InstructionEligibleForLSERemoval(HInstruction* inst) const {
  if (inst->IsNewInstance()) {
    return !inst->AsNewInstance()->NeedsChecks();
  } else if (inst->IsNewArray()) {
    HInstruction* array_length = inst->AsNewArray()->GetLength();
    bool known_array_length =
        array_length->IsIntConstant() && array_length->AsIntConstant()->GetValue() >= 0;
    return known_array_length &&
           std::all_of(inst->GetUses().cbegin(),
                       inst->GetUses().cend(),
                       [&](const HUseListNode<HInstruction*>& user) {
                         if (user.GetUser()->IsArrayGet() || user.GetUser()->IsArraySet()) {
                           return user.GetUser()->InputAt(1)->IsIntConstant();
                         }
                         return true;
                       });
  } else {
    return false;
  }
}

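// Collect the escape points of the tracked reference. Escapes that flow through
// Phi or Select instructions are followed transitively, since LSE cannot track
// heap locations through them, and irreducible loop headers are treated as
// escapes as well.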
void ReferenceInfo::CollectPartialEscapes(HGraph* graph) {
  ScopedArenaAllocator saa(graph->GetArenaStack());
  ArenaBitVector seen_instructions(&saa, graph->GetCurrentInstructionId(), false, kArenaAllocLSA);
  // Get regular escapes.
  ScopedArenaVector<HInstruction*> additional_escape_vectors(saa.Adapter(kArenaAllocLSA));
  LambdaEscapeVisitor scan_instructions([&](HInstruction* escape) -> bool {
    HandleEscape(escape);
    // LSE can't track heap-locations through Phi and Select instructions, so we
    // need to assume all escapes from these are escapes for the base reference.
    if ((escape->IsPhi() || escape->IsSelect()) && !seen_instructions.IsBitSet(escape->GetId())) {
      seen_instructions.SetBit(escape->GetId());
      additional_escape_vectors.push_back(escape);
    }
    return true;
  });
  additional_escape_vectors.push_back(reference_);
  while (!additional_escape_vectors.empty()) {
    HInstruction* ref = additional_escape_vectors.back();
    additional_escape_vectors.pop_back();
    DCHECK(ref == reference_ || ref->IsPhi() || ref->IsSelect()) << *ref;
    VisitEscapes(ref, scan_instructions);
  }

  // Mark irreducible loop headers as escaping since they cannot be tracked through.
  for (HBasicBlock* blk : graph->GetActiveBlocks()) {
    if (blk->IsLoopHeader() && blk->GetLoopInformation()->IsIrreducible()) {
      HandleEscape(blk);
    }
  }
}

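// Record compilation statistics about allocations that look eligible for full or
// partial load-store elimination. This only counts candidates; the actual removal
// is decided later by the LSE pass.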
void HeapLocationCollector::DumpReferenceStats(OptimizingCompilerStats* stats) {
  if (stats == nullptr) {
    return;
  }
  std::vector<bool> seen_instructions(GetGraph()->GetCurrentInstructionId(), false);
  for (auto hl : heap_locations_) {
    auto ri = hl->GetReferenceInfo();
    if (ri == nullptr || seen_instructions[ri->GetReference()->GetId()]) {
      continue;
    }
    auto instruction = ri->GetReference();
    seen_instructions[instruction->GetId()] = true;
    if (ri->IsSingletonAndRemovable()) {
      if (InstructionEligibleForLSERemoval(instruction)) {
        MaybeRecordStat(stats, MethodCompilationStat::kFullLSEPossible);
      }
    }
    // TODO: This is an estimate of the number of allocations we will be able
    // to (partially) remove. As additional work is done this can be refined.
    if (ri->IsPartialSingleton() && instruction->IsNewInstance() &&
        ri->GetNoEscapeSubgraph()->ContainsBlock(instruction->GetBlock()) &&
        !ri->GetNoEscapeSubgraph()->GetExcludedCohorts().empty() &&
        InstructionEligibleForLSERemoval(instruction)) {
      MaybeRecordStat(stats, MethodCompilationStat::kPartialLSEPossible);
    }
  }
}

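// Test if the array accesses [idx1, idx1 + vector_length1) and
// [idx2, idx2 + vector_length2) may refer to overlapping elements of the same
// array. Conservatively returns true (may alias) when the indexes cannot be
// analyzed.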
bool HeapLocationCollector::CanArrayElementsAlias(const HInstruction* idx1,
                                                  const size_t vector_length1,
                                                  const HInstruction* idx2,
                                                  const size_t vector_length2) const {
  DCHECK(idx1 != nullptr);
  DCHECK(idx2 != nullptr);
  DCHECK_GE(vector_length1, HeapLocation::kScalar);
  DCHECK_GE(vector_length2, HeapLocation::kScalar);

  // [i] and [i].
  if (idx1 == idx2) {
    return true;
  }

  // [CONST1] and [CONST2].
  if (idx1->IsIntConstant() && idx2->IsIntConstant()) {
    int64_t l1 = idx1->AsIntConstant()->GetValue();
    int64_t l2 = idx2->AsIntConstant()->GetValue();
    // To avoid any overflow in the following CONST+vector_length calculation,
    // use int64_t instead of int32_t.
    int64_t h1 = l1 + (vector_length1 - 1);
    int64_t h2 = l2 + (vector_length2 - 1);
    return CanIntegerRangesOverlap(l1, h1, l2, h2);
  }

  // [i+CONST] and [i].
  if (idx1->IsBinaryOperation() &&
      idx1->AsBinaryOperation()->GetConstantRight() != nullptr &&
      idx1->AsBinaryOperation()->GetLeastConstantLeft() == idx2) {
    return CanBinaryOpAndIndexAlias(idx1->AsBinaryOperation(),
                                    vector_length1,
                                    idx2,
                                    vector_length2);
  }

  // [i] and [i+CONST].
  if (idx2->IsBinaryOperation() &&
      idx2->AsBinaryOperation()->GetConstantRight() != nullptr &&
      idx2->AsBinaryOperation()->GetLeastConstantLeft() == idx1) {
    return CanBinaryOpAndIndexAlias(idx2->AsBinaryOperation(),
                                    vector_length2,
                                    idx1,
                                    vector_length1);
  }

  // [i+CONST1] and [i+CONST2].
  if (idx1->IsBinaryOperation() &&
      idx1->AsBinaryOperation()->GetConstantRight() != nullptr &&
      idx2->IsBinaryOperation() &&
      idx2->AsBinaryOperation()->GetConstantRight() != nullptr) {
    return CanBinaryOpsAlias(idx1->AsBinaryOperation(),
                             vector_length1,
                             idx2->AsBinaryOperation(),
                             vector_length2);
  }

  // By default, MAY alias.
  return true;
}

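// Collect heap locations for all blocks in reverse post order and build the
// aliasing matrix. Returns false and discards the collected data when there are
// too many heap locations to analyze or when there are no heap stores to
// eliminate.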
bool LoadStoreAnalysis::Run() {
  for (HBasicBlock* block : graph_->GetReversePostOrder()) {
    heap_location_collector_.VisitBasicBlock(block);
  }

  if (heap_location_collector_.GetNumberOfHeapLocations() > kMaxNumberOfHeapLocations) {
    // Bail out if there are too many heap locations to deal with.
    heap_location_collector_.CleanUp();
    return false;
  }
  if (!heap_location_collector_.HasHeapStores()) {
    // Without heap stores, this pass would act mostly as GVN on heap accesses.
    heap_location_collector_.CleanUp();
    return false;
  }
  heap_location_collector_.BuildAliasingMatrix();
  heap_location_collector_.DumpReferenceStats(stats_);
  return true;
}

}  // namespace art