blob: 4975bae2a2b79bddf044a3dff53ffb87c1f29b00 [file] [log] [blame]
xueliang.zhongc239a2b2017-04-27 15:31:37 +01001/*
2 * Copyright (C) 2017 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef ART_COMPILER_OPTIMIZING_LOAD_STORE_ANALYSIS_H_
18#define ART_COMPILER_OPTIMIZING_LOAD_STORE_ANALYSIS_H_
19
Alex Light86fe9b82020-11-16 16:54:01 +000020#include "base/arena_allocator.h"
21#include "base/arena_bit_vector.h"
Vladimir Markoef898422020-06-08 10:26:06 +010022#include "base/bit_vector-inl.h"
Alex Light86fe9b82020-11-16 16:54:01 +000023#include "base/scoped_arena_allocator.h"
Vladimir Markoef898422020-06-08 10:26:06 +010024#include "base/scoped_arena_containers.h"
Alex Light86fe9b82020-11-16 16:54:01 +000025#include "base/stl_util.h"
xueliang.zhongc239a2b2017-04-27 15:31:37 +010026#include "escape.h"
Alex Light86fe9b82020-11-16 16:54:01 +000027#include "execution_subgraph.h"
xueliang.zhongc239a2b2017-04-27 15:31:37 +010028#include "nodes.h"
Alex Light86fe9b82020-11-16 16:54:01 +000029#include "optimizing/optimizing_compiler_stats.h"
xueliang.zhongc239a2b2017-04-27 15:31:37 +010030
Vladimir Marko0a516052019-10-14 13:00:44 +000031namespace art {
xueliang.zhongc239a2b2017-04-27 15:31:37 +010032
// Controls how much escape information the analysis gathers for each
// reference, which in turn controls how aggressive load-store elimination
// may be.
enum class LoadStoreAnalysisType {
  // Whole-method escape analysis only; no per-block escape subgraph is built
  // (ReferenceInfo will never allocate an ExecutionSubgraph in this mode).
  kBasic,
  // Track partial (per-block) escapes, but additionally treat writes to
  // partially escaped values as escaping so that no predicated instructions
  // are required (see ReferenceInfo::PrunePartialEscapeWrites).
  kNoPredicatedInstructions,
  // Full partial-escape tracking.
  kFull,
};
38
// A ReferenceInfo contains additional info about a reference such as
// whether it's a singleton, returned, etc.
class ReferenceInfo : public DeletableArenaObject<kArenaAllocLSA> {
 public:
  // Computes escape information for `reference` eagerly in the constructor:
  // - whole-method singleton/return/deopt flags via CalculateEscape(), and
  // - (when `elimination_type` is not kBasic and the reference is a
  //   NewInstance) a per-block ExecutionSubgraph of the blocks where the
  //   reference has not escaped.
  // `pos` is the index of this info in HeapLocationCollector's
  // ref_info_array_.
  ReferenceInfo(HInstruction* reference,
                ScopedArenaAllocator* allocator,
                size_t pos,
                LoadStoreAnalysisType elimination_type)
      : reference_(reference),
        position_(pos),
        is_singleton_(true),
        is_singleton_and_not_returned_(true),
        is_singleton_and_not_deopt_visible_(true),
        allocator_(allocator),
        subgraph_(nullptr) {
    // TODO We can do this in one pass.
    // TODO NewArray is possible but will need to get a handle on how to deal with the dynamic loads
    // for now just ignore it.
    bool can_be_partial = elimination_type != LoadStoreAnalysisType::kBasic &&
                          (/* reference_->IsNewArray() || */ reference_->IsNewInstance());
    if (can_be_partial) {
      // Build the escape subgraph before collecting escapes; CollectPartialEscapes
      // removes escaping blocks from it via HandleEscape().
      subgraph_.reset(
          new (allocator) ExecutionSubgraph(reference->GetBlock()->GetGraph(), allocator));
      CollectPartialEscapes(reference_->GetBlock()->GetGraph());
    }
    CalculateEscape(reference_,
                    nullptr,
                    &is_singleton_,
                    &is_singleton_and_not_returned_,
                    &is_singleton_and_not_deopt_visible_);
    if (can_be_partial) {
      if (elimination_type == LoadStoreAnalysisType::kNoPredicatedInstructions) {
        // This is to mark writes to partially escaped values as also part of the escaped subset.
        // TODO We can avoid this if we have a 'ConditionalWrite' instruction. Will require testing
        // to see if the additional branches are worth it.
        PrunePartialEscapeWrites();
      }
      // The subgraph must be finalized exactly once, after all escapes
      // (including pruned writes) have been recorded.
      DCHECK(subgraph_ != nullptr);
      subgraph_->Finalize();
    } else {
      DCHECK(subgraph_ == nullptr);
    }
  }

  // Returns the subgraph of blocks where the reference has not escaped.
  // Only valid when IsPartialSingleton() holds.
  const ExecutionSubgraph* GetNoEscapeSubgraph() const {
    DCHECK(IsPartialSingleton());
    return subgraph_.get();
  }

  HInstruction* GetReference() const {
    return reference_;
  }

  // Index of this info in HeapLocationCollector's ref_info_array_.
  size_t GetPosition() const {
    return position_;
  }

  // Returns true if reference_ is the only name that can refer to its value during
  // the lifetime of the method. So it's guaranteed to not have any alias in
  // the method (including its callees).
  bool IsSingleton() const {
    return is_singleton_;
  }

  // This is a singleton and there are paths that don't escape the method
  bool IsPartialSingleton() const {
    auto ref = GetReference();
    // TODO NewArray is possible but will need to get a handle on how to deal with the dynamic loads
    // for now just ignore it.
    return (/* ref->IsNewArray() || */ ref->IsNewInstance()) &&
           subgraph_ != nullptr &&
           subgraph_->IsValid();
  }

  // Returns true if reference_ is a singleton and not returned to the caller or
  // used as an environment local of an HDeoptimize instruction.
  // The allocation and stores into reference_ may be eliminated for such cases.
  bool IsSingletonAndRemovable() const {
    return is_singleton_and_not_returned_ && is_singleton_and_not_deopt_visible_;
  }

  // Returns true if reference_ is a singleton and returned to the caller or
  // used as an environment local of an HDeoptimize instruction.
  bool IsSingletonAndNonRemovable() const {
    return is_singleton_ &&
           (!is_singleton_and_not_returned_ || !is_singleton_and_not_deopt_visible_);
  }

 private:
  // Walks `graph` and records every escape of reference_ (defined in the
  // .cc file); escapes are reported through the HandleEscape() overloads.
  void CollectPartialEscapes(HGraph* graph);
  // Marks `escape`'s block as escaped by removing it from the no-escape subgraph.
  void HandleEscape(HBasicBlock* escape) {
    DCHECK(subgraph_ != nullptr);
    subgraph_->RemoveBlock(escape);
  }
  void HandleEscape(HInstruction* escape) {
    HandleEscape(escape->GetBlock());
  }

  // Make sure we mark any writes/potential writes to heap-locations within partially
  // escaped values as escaping.
  void PrunePartialEscapeWrites();

  HInstruction* const reference_;
  const size_t position_;  // position in HeapLocationCollector's ref_info_array_.

  // Can only be referred to by a single name in the method.
  bool is_singleton_;
  // Is singleton and not returned to caller.
  bool is_singleton_and_not_returned_;
  // Is singleton and not used as an environment local of HDeoptimize.
  bool is_singleton_and_not_deopt_visible_;

  ScopedArenaAllocator* allocator_;

  // Blocks in which reference_ has not escaped; nullptr unless partial
  // tracking was requested and the reference is a NewInstance.
  std::unique_ptr<ExecutionSubgraph> subgraph_;

  DISALLOW_COPY_AND_ASSIGN(ReferenceInfo);
};
157
// A heap location is a reference-offset/index pair that a value can be loaded from
// or stored to.
class HeapLocation : public ArenaObject<kArenaAllocLSA> {
 public:
  // Sentinel offset for array-element locations (which use index_ instead).
  static constexpr size_t kInvalidFieldOffset = -1;
  // Default value for heap locations which are not vector data.
  static constexpr size_t kScalar = 1;
  // TODO: more fine-grained array types.
  static constexpr int16_t kDeclaringClassDefIndexForArrays = -1;

  // Exactly one of `offset`/`index` is valid: field locations have a real
  // offset and a null index; array locations have kInvalidFieldOffset and a
  // non-null index (enforced by the DCHECK below).
  HeapLocation(ReferenceInfo* ref_info,
               DataType::Type type,
               size_t offset,
               HInstruction* index,
               size_t vector_length,
               int16_t declaring_class_def_index)
      : ref_info_(ref_info),
        // Store the signed variant so that differently-signed accesses of the
        // same bytes map to the same location (see comment on type_ below).
        type_(DataType::ToSigned(type)),
        offset_(offset),
        index_(index),
        vector_length_(vector_length),
        declaring_class_def_index_(declaring_class_def_index),
        has_aliased_locations_(false) {
    DCHECK(ref_info != nullptr);
    DCHECK((offset == kInvalidFieldOffset && index != nullptr) ||
           (offset != kInvalidFieldOffset && index == nullptr));
  }

  ReferenceInfo* GetReferenceInfo() const { return ref_info_; }
  DataType::Type GetType() const { return type_; }
  size_t GetOffset() const { return offset_; }
  HInstruction* GetIndex() const { return index_; }
  size_t GetVectorLength() const { return vector_length_; }

  // Returns the definition of declaring class' dex index.
  // It's kDeclaringClassDefIndexForArrays for an array element.
  int16_t GetDeclaringClassDefIndex() const {
    return declaring_class_def_index_;
  }

  bool IsArray() const {
    return index_ != nullptr;
  }

  bool HasAliasedLocations() const {
    return has_aliased_locations_;
  }

  void SetHasAliasedLocations(bool val) {
    has_aliased_locations_ = val;
  }

 private:
  // Reference for instance/static field, array element or vector data.
  ReferenceInfo* const ref_info_;
  // Type of data residing at HeapLocation (always signed for integral
  // data since e.g. a[i] and a[i] & 0xff are represented by differently
  // signed types; char vs short are disambiguated through the reference).
  const DataType::Type type_;
  // Offset of static/instance field.
  // Invalid when this HeapLocation is not field.
  const size_t offset_;
  // Index of an array element or starting index of vector data.
  // Invalid when this HeapLocation is not array.
  HInstruction* const index_;
  // Vector length of vector data.
  // When this HeapLocation is not vector data, it's value is kScalar.
  const size_t vector_length_;
  // Declaring class's def's dex index.
  // Invalid when this HeapLocation is not field access.
  const int16_t declaring_class_def_index_;

  // Has aliased heap locations in the method, due to either the
  // reference is aliased or the array element is aliased via different
  // index names.
  bool has_aliased_locations_;

  DISALLOW_COPY_AND_ASSIGN(HeapLocation);
};
237
// A HeapLocationCollector collects all relevant heap locations and keeps
// an aliasing matrix for all locations.
class HeapLocationCollector : public HGraphVisitor {
 public:
  static constexpr size_t kHeapLocationNotFound = -1;
  // Start with a single uint32_t word. That's enough bits for pair-wise
  // aliasing matrix of 8 heap locations.
  static constexpr uint32_t kInitialAliasingMatrixBitVectorSize = 32;

  HeapLocationCollector(HGraph* graph,
                        ScopedArenaAllocator* allocator,
                        LoadStoreAnalysisType lse_type)
      : HGraphVisitor(graph),
        allocator_(allocator),
        ref_info_array_(allocator->Adapter(kArenaAllocLSA)),
        heap_locations_(allocator->Adapter(kArenaAllocLSA)),
        aliasing_matrix_(allocator, kInitialAliasingMatrixBitVectorSize, true, kArenaAllocLSA),
        has_heap_stores_(false),
        has_volatile_(false),
        has_monitor_operations_(false),
        lse_type_(lse_type) {
    aliasing_matrix_.ClearAllBits();
  }

  ~HeapLocationCollector() {
    CleanUp();
  }

  // Releases collected data. ReferenceInfo objects are deletable arena
  // objects and must be explicitly deleted; HeapLocations are plain arena
  // objects and are simply dropped.
  void CleanUp() {
    heap_locations_.clear();
    STLDeleteContainerPointers(ref_info_array_.begin(), ref_info_array_.end());
    ref_info_array_.clear();
  }

  // Number of collected references that are partial singletons
  // (see ReferenceInfo::IsPartialSingleton); used for statistics.
  size_t CountPartialSingletons() const {
    return std::count_if(ref_info_array_.begin(),
                         ref_info_array_.end(),
                         [](ReferenceInfo* ri) { return ri->IsPartialSingleton(); });
  }

  size_t GetNumberOfHeapLocations() const {
    return heap_locations_.size();
  }

  HeapLocation* GetHeapLocation(size_t index) const {
    return heap_locations_[index];
  }

  // Returns the index of `hl` in heap_locations_. NOTE: if `hl` is not an
  // element of heap_locations_ this returns heap_locations_.size().
  size_t GetHeapLocationIndex(const HeapLocation* hl) const {
    auto res = std::find(heap_locations_.cbegin(), heap_locations_.cend(), hl);
    return std::distance(heap_locations_.cbegin(), res);
  }

  HInstruction* HuntForOriginalReference(HInstruction* ref) const {
    // An original reference can be transformed by instructions like:
    //   i0 NewArray
    //   i1 HInstruction(i0)  <-- NullCheck, BoundType, IntermediateAddress.
    //   i2 ArrayGet(i1, index)
    DCHECK(ref != nullptr);
    while (ref->IsNullCheck() || ref->IsBoundType() || ref->IsIntermediateAddress()) {
      ref = ref->InputAt(0);
    }
    return ref;
  }

  // Linear search for the ReferenceInfo of `ref`; returns nullptr if `ref`
  // has not been collected.
  ReferenceInfo* FindReferenceInfoOf(HInstruction* ref) const {
    for (size_t i = 0; i < ref_info_array_.size(); i++) {
      ReferenceInfo* ref_info = ref_info_array_[i];
      if (ref_info->GetReference() == ref) {
        DCHECK_EQ(i, ref_info->GetPosition());
        return ref_info;
      }
    }
    return nullptr;
  }

  // Returns the heap location index for the given field access, or
  // kHeapLocationNotFound if it was not collected.
  size_t GetFieldHeapLocation(HInstruction* object, const FieldInfo* field) const {
    DCHECK(object != nullptr);
    DCHECK(field != nullptr);
    return FindHeapLocationIndex(FindReferenceInfoOf(HuntForOriginalReference(object)),
                                 field->GetFieldType(),
                                 field->GetFieldOffset().SizeValue(),
                                 nullptr,
                                 HeapLocation::kScalar,
                                 field->GetDeclaringClassDefIndex());
  }

  // Returns the heap location index for an ArrayGet/ArraySet/VecLoad/VecStore
  // instruction, or kHeapLocationNotFound if it was not collected.
  size_t GetArrayHeapLocation(HInstruction* instruction) const {
    DCHECK(instruction != nullptr);
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    DataType::Type type = instruction->GetType();
    size_t vector_length = HeapLocation::kScalar;
    if (instruction->IsArraySet()) {
      // An ArraySet's own type is that of the stored value's input slot;
      // the location is keyed by the component type instead.
      type = instruction->AsArraySet()->GetComponentType();
    } else if (instruction->IsVecStore() ||
               instruction->IsVecLoad()) {
      HVecOperation* vec_op = instruction->AsVecOperation();
      type = vec_op->GetPackedType();
      vector_length = vec_op->GetVectorLength();
    } else {
      DCHECK(instruction->IsArrayGet());
    }
    return FindHeapLocationIndex(FindReferenceInfoOf(HuntForOriginalReference(array)),
                                 type,
                                 HeapLocation::kInvalidFieldOffset,
                                 index,
                                 vector_length,
                                 HeapLocation::kDeclaringClassDefIndexForArrays);
  }

  bool HasHeapStores() const {
    return has_heap_stores_;
  }

  bool HasVolatile() const {
    return has_volatile_;
  }

  bool HasMonitorOps() const {
    return has_monitor_operations_;
  }

  // Find and return the heap location index in heap_locations_.
  // NOTE: When heap locations are created, potentially aliasing/overlapping
  // accesses are given different indexes. This find function also
  // doesn't take aliasing/overlapping into account. For example,
  // this function returns three different indexes for:
  //   - ref_info=array, index=i, vector_length=kScalar;
  //   - ref_info=array, index=i, vector_length=2;
  //   - ref_info=array, index=i, vector_length=4;
  // In later analysis, ComputeMayAlias() and MayAlias() compute and tell whether
  // these indexes alias.
  size_t FindHeapLocationIndex(ReferenceInfo* ref_info,
                               DataType::Type type,
                               size_t offset,
                               HInstruction* index,
                               size_t vector_length,
                               int16_t declaring_class_def_index) const {
    // Locations are stored with signed types (see HeapLocation::type_),
    // so normalize the lookup type the same way.
    DataType::Type lookup_type = DataType::ToSigned(type);
    for (size_t i = 0; i < heap_locations_.size(); i++) {
      HeapLocation* loc = heap_locations_[i];
      if (loc->GetReferenceInfo() == ref_info &&
          loc->GetType() == lookup_type &&
          loc->GetOffset() == offset &&
          loc->GetIndex() == index &&
          loc->GetVectorLength() == vector_length &&
          loc->GetDeclaringClassDefIndex() == declaring_class_def_index) {
        return i;
      }
    }
    return kHeapLocationNotFound;
  }

  // Returns whether LSE could plausibly remove `inst` (defined in the .cc file).
  bool InstructionEligibleForLSERemoval(HInstruction* inst) const;

  // Get some estimated statistics based on our analysis.
  void DumpReferenceStats(OptimizingCompilerStats* stats);

  // Returns true if heap_locations_[index1] and heap_locations_[index2] may alias.
  bool MayAlias(size_t index1, size_t index2) const {
    // The matrix only stores the upper triangle (index1 < index2).
    if (index1 < index2) {
      return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index1, index2));
    } else if (index1 > index2) {
      return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index2, index1));
    } else {
      DCHECK(false) << "index1 and index2 are expected to be different";
      return true;
    }
  }

  void BuildAliasingMatrix() {
    const size_t number_of_locations = heap_locations_.size();
    if (number_of_locations == 0) {
      return;
    }
    size_t pos = 0;
    // Compute aliasing info between every pair of different heap locations.
    // Save the result in a matrix represented as a BitVector.
    for (size_t i = 0; i < number_of_locations - 1; i++) {
      for (size_t j = i + 1; j < number_of_locations; j++) {
        if (ComputeMayAlias(i, j)) {
          aliasing_matrix_.SetBit(CheckedAliasingMatrixPosition(i, j, pos));
        }
        pos++;
      }
    }
  }

  // Returns whether the two references may point to the same object.
  // Singletons never alias anything other than themselves.
  static bool CanReferencesAlias(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) {
    if (ref_info1 == ref_info2) {
      return true;
    } else if (ref_info1->IsSingleton()) {
      return false;
    } else if (ref_info2->IsSingleton()) {
      return false;
    } else if (!MayAliasWithPreexistenceChecking(ref_info1, ref_info2) ||
               !MayAliasWithPreexistenceChecking(ref_info2, ref_info1)) {
      return false;
    }
    return true;
  }

 private:
  // An allocation cannot alias with a name which already exists at the point
  // of the allocation, such as a parameter or a load happening before the allocation.
  static bool MayAliasWithPreexistenceChecking(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) {
    if (ref_info1->GetReference()->IsNewInstance() || ref_info1->GetReference()->IsNewArray()) {
      // Any reference that can alias with the allocation must appear after it in the block/in
      // the block's successors. In reverse post order, those instructions will be visited after
      // the allocation.
      return ref_info2->GetPosition() >= ref_info1->GetPosition();
    }
    return true;
  }

  // Returns whether the two (index, vector_length) array accesses may overlap
  // (defined in the .cc file).
  bool CanArrayElementsAlias(const HInstruction* idx1,
                             const size_t vector_length1,
                             const HInstruction* idx2,
                             const size_t vector_length2) const;

  // `index1` and `index2` are indices in the array of collected heap locations.
  // Returns the position in the bit vector that tracks whether the two heap
  // locations may alias.
  size_t AliasingMatrixPosition(size_t index1, size_t index2) const {
    DCHECK(index2 > index1);
    const size_t number_of_locations = heap_locations_.size();
    // It's (num_of_locations - 1) + ... + (num_of_locations - index1) + (index2 - index1 - 1).
    return (number_of_locations * index1 - (1 + index1) * index1 / 2 + (index2 - index1 - 1));
  }

  // An additional position is passed in to make sure the calculated position is correct.
  size_t CheckedAliasingMatrixPosition(size_t index1, size_t index2, size_t position) {
    size_t calculated_position = AliasingMatrixPosition(index1, index2);
    DCHECK_EQ(calculated_position, position);
    return calculated_position;
  }

  // Compute if two locations may alias to each other.
  bool ComputeMayAlias(size_t index1, size_t index2) const {
    DCHECK_NE(index1, index2);
    HeapLocation* loc1 = heap_locations_[index1];
    HeapLocation* loc2 = heap_locations_[index2];
    if (loc1->GetOffset() != loc2->GetOffset()) {
      // Either two different instance fields, or one is an instance
      // field and the other is an array data.
      return false;
    }
    if (loc1->GetDeclaringClassDefIndex() != loc2->GetDeclaringClassDefIndex()) {
      // Different types.
      return false;
    }
    if (!CanReferencesAlias(loc1->GetReferenceInfo(), loc2->GetReferenceInfo())) {
      return false;
    }
    if (loc1->IsArray() && loc2->IsArray()) {
      HInstruction* idx1 = loc1->GetIndex();
      HInstruction* idx2 = loc2->GetIndex();
      size_t vector_length1 = loc1->GetVectorLength();
      size_t vector_length2 = loc2->GetVectorLength();
      if (!CanArrayElementsAlias(idx1, vector_length1, idx2, vector_length2)) {
        return false;
      }
    }
    // Record on both locations that they participate in some alias pair.
    loc1->SetHasAliasedLocations(true);
    loc2->SetHasAliasedLocations(true);
    return true;
  }

  ReferenceInfo* GetOrCreateReferenceInfo(HInstruction* instruction) {
    ReferenceInfo* ref_info = FindReferenceInfoOf(instruction);
    if (ref_info == nullptr) {
      size_t pos = ref_info_array_.size();
      ref_info = new (allocator_) ReferenceInfo(instruction, allocator_, pos, lse_type_);
      ref_info_array_.push_back(ref_info);
    }
    return ref_info;
  }

  void CreateReferenceInfoForReferenceType(HInstruction* instruction) {
    if (instruction->GetType() != DataType::Type::kReference) {
      return;
    }
    DCHECK(FindReferenceInfoOf(instruction) == nullptr);
    GetOrCreateReferenceInfo(instruction);
  }

  // Creates a heap location for the access if an equivalent one has not been
  // collected yet (see FindHeapLocationIndex for the equivalence criteria).
  void MaybeCreateHeapLocation(HInstruction* ref,
                               DataType::Type type,
                               size_t offset,
                               HInstruction* index,
                               size_t vector_length,
                               int16_t declaring_class_def_index) {
    HInstruction* original_ref = HuntForOriginalReference(ref);
    ReferenceInfo* ref_info = GetOrCreateReferenceInfo(original_ref);
    size_t heap_location_idx = FindHeapLocationIndex(
        ref_info, type, offset, index, vector_length, declaring_class_def_index);
    if (heap_location_idx == kHeapLocationNotFound) {
      HeapLocation* heap_loc = new (allocator_)
          HeapLocation(ref_info, type, offset, index, vector_length, declaring_class_def_index);
      heap_locations_.push_back(heap_loc);
    }
  }

  void VisitFieldAccess(HInstruction* ref, const FieldInfo& field_info) {
    if (field_info.IsVolatile()) {
      has_volatile_ = true;
    }
    DataType::Type type = field_info.GetFieldType();
    const uint16_t declaring_class_def_index = field_info.GetDeclaringClassDefIndex();
    const size_t offset = field_info.GetFieldOffset().SizeValue();
    MaybeCreateHeapLocation(ref,
                            type,
                            offset,
                            nullptr,
                            HeapLocation::kScalar,
                            declaring_class_def_index);
  }

  void VisitArrayAccess(HInstruction* array,
                        HInstruction* index,
                        DataType::Type type,
                        size_t vector_length) {
    MaybeCreateHeapLocation(array,
                            type,
                            HeapLocation::kInvalidFieldOffset,
                            index,
                            vector_length,
                            HeapLocation::kDeclaringClassDefIndexForArrays);
  }

  void VisitPredicatedInstanceFieldGet(HPredicatedInstanceFieldGet* instruction) override {
    VisitFieldAccess(instruction->GetTarget(), instruction->GetFieldInfo());
    CreateReferenceInfoForReferenceType(instruction);
  }
  void VisitInstanceFieldGet(HInstanceFieldGet* instruction) override {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitInstanceFieldSet(HInstanceFieldSet* instruction) override {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    has_heap_stores_ = true;
  }

  void VisitStaticFieldGet(HStaticFieldGet* instruction) override {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitStaticFieldSet(HStaticFieldSet* instruction) override {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    has_heap_stores_ = true;
  }

  // We intentionally don't collect HUnresolvedInstanceField/HUnresolvedStaticField accesses
  // since we cannot accurately track the fields.

  void VisitArrayGet(HArrayGet* instruction) override {
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    DataType::Type type = instruction->GetType();
    VisitArrayAccess(array, index, type, HeapLocation::kScalar);
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitArraySet(HArraySet* instruction) override {
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    DataType::Type type = instruction->GetComponentType();
    VisitArrayAccess(array, index, type, HeapLocation::kScalar);
    has_heap_stores_ = true;
  }

  void VisitVecLoad(HVecLoad* instruction) override {
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    DataType::Type type = instruction->GetPackedType();
    VisitArrayAccess(array, index, type, instruction->GetVectorLength());
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitVecStore(HVecStore* instruction) override {
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    DataType::Type type = instruction->GetPackedType();
    VisitArrayAccess(array, index, type, instruction->GetVectorLength());
    has_heap_stores_ = true;
  }

  void VisitInstruction(HInstruction* instruction) override {
    // Any new-instance or new-array cannot alias with references that
    // pre-exist the new-instance/new-array. We append entries into
    // ref_info_array_ which keeps track of the order of creation
    // of reference values since we visit the blocks in reverse post order.
    //
    // By default, VisitXXX() (including VisitPhi()) calls VisitInstruction(),
    // unless VisitXXX() is overridden. VisitInstanceFieldGet() etc. above
    // also call CreateReferenceInfoForReferenceType() explicitly.
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitMonitorOperation(HMonitorOperation* monitor ATTRIBUTE_UNUSED) override {
    has_monitor_operations_ = true;
  }

  ScopedArenaAllocator* allocator_;
  ScopedArenaVector<ReferenceInfo*> ref_info_array_;  // All references used for heap accesses.
  ScopedArenaVector<HeapLocation*> heap_locations_;   // All heap locations.
  ArenaBitVector aliasing_matrix_;  // aliasing info between each pair of locations.
  bool has_heap_stores_;    // If there is no heap stores, LSE acts as GVN with better
                            // alias analysis and won't be as effective.
  bool has_volatile_;       // If there are volatile field accesses.
  bool has_monitor_operations_;  // If there are monitor operations.
  LoadStoreAnalysisType lse_type_;

  DISALLOW_COPY_AND_ASSIGN(HeapLocationCollector);
};
656
// Analysis pass that drives a HeapLocationCollector over the graph.
class LoadStoreAnalysis {
 public:
  // `lse_type` controls whether we should keep track of escapes at a per-block
  // level for partial LSE; it is downgraded to kBasic when the graph cannot be
  // analysed by ExecutionSubgraph (see ExecutionSubgraph::CanAnalyse).
  explicit LoadStoreAnalysis(HGraph* graph,
                             OptimizingCompilerStats* stats,
                             ScopedArenaAllocator* local_allocator,
                             LoadStoreAnalysisType lse_type)
      : graph_(graph),
        stats_(stats),
        heap_location_collector_(
            graph,
            local_allocator,
            ExecutionSubgraph::CanAnalyse(graph_) ? lse_type : LoadStoreAnalysisType::kBasic) {}

  const HeapLocationCollector& GetHeapLocationCollector() const {
    return heap_location_collector_;
  }

  // Runs the analysis; returns whether the result is usable
  // (defined in the .cc file).
  bool Run();

 private:
  HGraph* graph_;
  OptimizingCompilerStats* stats_;
  HeapLocationCollector heap_location_collector_;

  DISALLOW_COPY_AND_ASSIGN(LoadStoreAnalysis);
};
685
686} // namespace art
687
688#endif // ART_COMPILER_OPTIMIZING_LOAD_STORE_ANALYSIS_H_