/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef ART_COMPILER_OPTIMIZING_LOAD_STORE_ANALYSIS_H_
#define ART_COMPILER_OPTIMIZING_LOAD_STORE_ANALYSIS_H_

#include "escape.h"
#include "nodes.h"
#include "optimization.h"

namespace art {

26// A ReferenceInfo contains additional info about a reference such as
27// whether it's a singleton, returned, etc.
28class ReferenceInfo : public ArenaObject<kArenaAllocMisc> {
29 public:
30 ReferenceInfo(HInstruction* reference, size_t pos)
31 : reference_(reference),
32 position_(pos),
33 is_singleton_(true),
34 is_singleton_and_not_returned_(true),
35 is_singleton_and_not_deopt_visible_(true),
36 has_index_aliasing_(false) {
37 CalculateEscape(reference_,
38 nullptr,
39 &is_singleton_,
40 &is_singleton_and_not_returned_,
41 &is_singleton_and_not_deopt_visible_);
42 }
43
44 HInstruction* GetReference() const {
45 return reference_;
46 }
47
48 size_t GetPosition() const {
49 return position_;
50 }
51
52 // Returns true if reference_ is the only name that can refer to its value during
53 // the lifetime of the method. So it's guaranteed to not have any alias in
54 // the method (including its callees).
55 bool IsSingleton() const {
56 return is_singleton_;
57 }
58
59 // Returns true if reference_ is a singleton and not returned to the caller or
60 // used as an environment local of an HDeoptimize instruction.
61 // The allocation and stores into reference_ may be eliminated for such cases.
62 bool IsSingletonAndRemovable() const {
63 return is_singleton_and_not_returned_ && is_singleton_and_not_deopt_visible_;
64 }
65
66 // Returns true if reference_ is a singleton and returned to the caller or
67 // used as an environment local of an HDeoptimize instruction.
68 bool IsSingletonAndNonRemovable() const {
69 return is_singleton_ &&
70 (!is_singleton_and_not_returned_ || !is_singleton_and_not_deopt_visible_);
71 }
72
73 bool HasIndexAliasing() {
74 return has_index_aliasing_;
75 }
76
77 void SetHasIndexAliasing(bool has_index_aliasing) {
78 // Only allow setting to true.
79 DCHECK(has_index_aliasing);
80 has_index_aliasing_ = has_index_aliasing;
81 }
82
83 private:
84 HInstruction* const reference_;
85 const size_t position_; // position in HeapLocationCollector's ref_info_array_.
86
87 // Can only be referred to by a single name in the method.
88 bool is_singleton_;
89 // Is singleton and not returned to caller.
90 bool is_singleton_and_not_returned_;
91 // Is singleton and not used as an environment local of HDeoptimize.
92 bool is_singleton_and_not_deopt_visible_;
93 // Some heap locations with reference_ have array index aliasing,
94 // e.g. arr[i] and arr[j] may be the same location.
95 bool has_index_aliasing_;
96
97 DISALLOW_COPY_AND_ASSIGN(ReferenceInfo);
98};
99
100// A heap location is a reference-offset/index pair that a value can be loaded from
101// or stored to.
102class HeapLocation : public ArenaObject<kArenaAllocMisc> {
103 public:
104 static constexpr size_t kInvalidFieldOffset = -1;
105
106 // TODO: more fine-grained array types.
107 static constexpr int16_t kDeclaringClassDefIndexForArrays = -1;
108
109 HeapLocation(ReferenceInfo* ref_info,
110 size_t offset,
111 HInstruction* index,
112 int16_t declaring_class_def_index)
113 : ref_info_(ref_info),
114 offset_(offset),
115 index_(index),
116 declaring_class_def_index_(declaring_class_def_index),
117 value_killed_by_loop_side_effects_(true) {
118 DCHECK(ref_info != nullptr);
119 DCHECK((offset == kInvalidFieldOffset && index != nullptr) ||
120 (offset != kInvalidFieldOffset && index == nullptr));
121 if (ref_info->IsSingleton() && !IsArrayElement()) {
122 // Assume this location's value cannot be killed by loop side effects
123 // until proven otherwise.
124 value_killed_by_loop_side_effects_ = false;
125 }
126 }
127
128 ReferenceInfo* GetReferenceInfo() const { return ref_info_; }
129 size_t GetOffset() const { return offset_; }
130 HInstruction* GetIndex() const { return index_; }
131
132 // Returns the definition of declaring class' dex index.
133 // It's kDeclaringClassDefIndexForArrays for an array element.
134 int16_t GetDeclaringClassDefIndex() const {
135 return declaring_class_def_index_;
136 }
137
138 bool IsArrayElement() const {
139 return index_ != nullptr;
140 }
141
142 bool IsValueKilledByLoopSideEffects() const {
143 return value_killed_by_loop_side_effects_;
144 }
145
146 void SetValueKilledByLoopSideEffects(bool val) {
147 value_killed_by_loop_side_effects_ = val;
148 }
149
150 private:
151 ReferenceInfo* const ref_info_; // reference for instance/static field or array access.
152 const size_t offset_; // offset of static/instance field.
153 HInstruction* const index_; // index of an array element.
154 const int16_t declaring_class_def_index_; // declaring class's def's dex index.
155 bool value_killed_by_loop_side_effects_; // value of this location may be killed by loop
156 // side effects because this location is stored
157 // into inside a loop. This gives
158 // better info on whether a singleton's location
159 // value may be killed by loop side effects.
160
161 DISALLOW_COPY_AND_ASSIGN(HeapLocation);
162};
163
164// A HeapLocationCollector collects all relevant heap locations and keeps
165// an aliasing matrix for all locations.
166class HeapLocationCollector : public HGraphVisitor {
167 public:
168 static constexpr size_t kHeapLocationNotFound = -1;
169 // Start with a single uint32_t word. That's enough bits for pair-wise
170 // aliasing matrix of 8 heap locations.
171 static constexpr uint32_t kInitialAliasingMatrixBitVectorSize = 32;
172
173 explicit HeapLocationCollector(HGraph* graph)
174 : HGraphVisitor(graph),
175 ref_info_array_(graph->GetArena()->Adapter(kArenaAllocLSE)),
176 heap_locations_(graph->GetArena()->Adapter(kArenaAllocLSE)),
177 aliasing_matrix_(graph->GetArena(),
178 kInitialAliasingMatrixBitVectorSize,
179 true,
180 kArenaAllocLSE),
181 has_heap_stores_(false),
182 has_volatile_(false),
183 has_monitor_operations_(false) {}
184
185 void CleanUp() {
186 heap_locations_.clear();
187 ref_info_array_.clear();
188 }
189
190 size_t GetNumberOfHeapLocations() const {
191 return heap_locations_.size();
192 }
193
194 HeapLocation* GetHeapLocation(size_t index) const {
195 return heap_locations_[index];
196 }
197
198 HInstruction* HuntForOriginalReference(HInstruction* ref) const {
199 DCHECK(ref != nullptr);
200 while (ref->IsNullCheck() || ref->IsBoundType()) {
201 ref = ref->InputAt(0);
202 }
203 return ref;
204 }
205
206 ReferenceInfo* FindReferenceInfoOf(HInstruction* ref) const {
207 for (size_t i = 0; i < ref_info_array_.size(); i++) {
208 ReferenceInfo* ref_info = ref_info_array_[i];
209 if (ref_info->GetReference() == ref) {
210 DCHECK_EQ(i, ref_info->GetPosition());
211 return ref_info;
212 }
213 }
214 return nullptr;
215 }
216
217 bool HasHeapStores() const {
218 return has_heap_stores_;
219 }
220
221 bool HasVolatile() const {
222 return has_volatile_;
223 }
224
225 bool HasMonitorOps() const {
226 return has_monitor_operations_;
227 }
228
229 // Find and return the heap location index in heap_locations_.
230 size_t FindHeapLocationIndex(ReferenceInfo* ref_info,
231 size_t offset,
232 HInstruction* index,
233 int16_t declaring_class_def_index) const {
234 for (size_t i = 0; i < heap_locations_.size(); i++) {
235 HeapLocation* loc = heap_locations_[i];
236 if (loc->GetReferenceInfo() == ref_info &&
237 loc->GetOffset() == offset &&
238 loc->GetIndex() == index &&
239 loc->GetDeclaringClassDefIndex() == declaring_class_def_index) {
240 return i;
241 }
242 }
243 return kHeapLocationNotFound;
244 }
245
246 // Returns true if heap_locations_[index1] and heap_locations_[index2] may alias.
247 bool MayAlias(size_t index1, size_t index2) const {
248 if (index1 < index2) {
249 return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index1, index2));
250 } else if (index1 > index2) {
251 return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index2, index1));
252 } else {
253 DCHECK(false) << "index1 and index2 are expected to be different";
254 return true;
255 }
256 }
257
258 void BuildAliasingMatrix() {
259 const size_t number_of_locations = heap_locations_.size();
260 if (number_of_locations == 0) {
261 return;
262 }
263 size_t pos = 0;
264 // Compute aliasing info between every pair of different heap locations.
265 // Save the result in a matrix represented as a BitVector.
266 for (size_t i = 0; i < number_of_locations - 1; i++) {
267 for (size_t j = i + 1; j < number_of_locations; j++) {
268 if (ComputeMayAlias(i, j)) {
269 aliasing_matrix_.SetBit(CheckedAliasingMatrixPosition(i, j, pos));
270 }
271 pos++;
272 }
273 }
274 }
275
276 private:
277 // An allocation cannot alias with a name which already exists at the point
278 // of the allocation, such as a parameter or a load happening before the allocation.
279 bool MayAliasWithPreexistenceChecking(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) const {
280 if (ref_info1->GetReference()->IsNewInstance() || ref_info1->GetReference()->IsNewArray()) {
281 // Any reference that can alias with the allocation must appear after it in the block/in
282 // the block's successors. In reverse post order, those instructions will be visited after
283 // the allocation.
284 return ref_info2->GetPosition() >= ref_info1->GetPosition();
285 }
286 return true;
287 }
288
289 bool CanReferencesAlias(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) const {
290 if (ref_info1 == ref_info2) {
291 return true;
292 } else if (ref_info1->IsSingleton()) {
293 return false;
294 } else if (ref_info2->IsSingleton()) {
295 return false;
296 } else if (!MayAliasWithPreexistenceChecking(ref_info1, ref_info2) ||
297 !MayAliasWithPreexistenceChecking(ref_info2, ref_info1)) {
298 return false;
299 }
300 return true;
301 }
302
303 // `index1` and `index2` are indices in the array of collected heap locations.
304 // Returns the position in the bit vector that tracks whether the two heap
305 // locations may alias.
306 size_t AliasingMatrixPosition(size_t index1, size_t index2) const {
307 DCHECK(index2 > index1);
308 const size_t number_of_locations = heap_locations_.size();
309 // It's (num_of_locations - 1) + ... + (num_of_locations - index1) + (index2 - index1 - 1).
310 return (number_of_locations * index1 - (1 + index1) * index1 / 2 + (index2 - index1 - 1));
311 }
312
313 // An additional position is passed in to make sure the calculated position is correct.
314 size_t CheckedAliasingMatrixPosition(size_t index1, size_t index2, size_t position) {
315 size_t calculated_position = AliasingMatrixPosition(index1, index2);
316 DCHECK_EQ(calculated_position, position);
317 return calculated_position;
318 }
319
320 // Compute if two locations may alias to each other.
321 bool ComputeMayAlias(size_t index1, size_t index2) const {
322 HeapLocation* loc1 = heap_locations_[index1];
323 HeapLocation* loc2 = heap_locations_[index2];
324 if (loc1->GetOffset() != loc2->GetOffset()) {
325 // Either two different instance fields, or one is an instance
326 // field and the other is an array element.
327 return false;
328 }
329 if (loc1->GetDeclaringClassDefIndex() != loc2->GetDeclaringClassDefIndex()) {
330 // Different types.
331 return false;
332 }
333 if (!CanReferencesAlias(loc1->GetReferenceInfo(), loc2->GetReferenceInfo())) {
334 return false;
335 }
336 if (loc1->IsArrayElement() && loc2->IsArrayElement()) {
337 HInstruction* array_index1 = loc1->GetIndex();
338 HInstruction* array_index2 = loc2->GetIndex();
339 DCHECK(array_index1 != nullptr);
340 DCHECK(array_index2 != nullptr);
341 if (array_index1->IsIntConstant() &&
342 array_index2->IsIntConstant() &&
343 array_index1->AsIntConstant()->GetValue() != array_index2->AsIntConstant()->GetValue()) {
344 // Different constant indices do not alias.
345 return false;
346 }
347 ReferenceInfo* ref_info = loc1->GetReferenceInfo();
348 ref_info->SetHasIndexAliasing(true);
349 }
350 return true;
351 }
352
353 ReferenceInfo* GetOrCreateReferenceInfo(HInstruction* instruction) {
354 ReferenceInfo* ref_info = FindReferenceInfoOf(instruction);
355 if (ref_info == nullptr) {
356 size_t pos = ref_info_array_.size();
357 ref_info = new (GetGraph()->GetArena()) ReferenceInfo(instruction, pos);
358 ref_info_array_.push_back(ref_info);
359 }
360 return ref_info;
361 }
362
363 void CreateReferenceInfoForReferenceType(HInstruction* instruction) {
364 if (instruction->GetType() != Primitive::kPrimNot) {
365 return;
366 }
367 DCHECK(FindReferenceInfoOf(instruction) == nullptr);
368 GetOrCreateReferenceInfo(instruction);
369 }
370
371 HeapLocation* GetOrCreateHeapLocation(HInstruction* ref,
372 size_t offset,
373 HInstruction* index,
374 int16_t declaring_class_def_index) {
375 HInstruction* original_ref = HuntForOriginalReference(ref);
376 ReferenceInfo* ref_info = GetOrCreateReferenceInfo(original_ref);
377 size_t heap_location_idx = FindHeapLocationIndex(
378 ref_info, offset, index, declaring_class_def_index);
379 if (heap_location_idx == kHeapLocationNotFound) {
380 HeapLocation* heap_loc = new (GetGraph()->GetArena())
381 HeapLocation(ref_info, offset, index, declaring_class_def_index);
382 heap_locations_.push_back(heap_loc);
383 return heap_loc;
384 }
385 return heap_locations_[heap_location_idx];
386 }
387
388 HeapLocation* VisitFieldAccess(HInstruction* ref, const FieldInfo& field_info) {
389 if (field_info.IsVolatile()) {
390 has_volatile_ = true;
391 }
392 const uint16_t declaring_class_def_index = field_info.GetDeclaringClassDefIndex();
393 const size_t offset = field_info.GetFieldOffset().SizeValue();
394 return GetOrCreateHeapLocation(ref, offset, nullptr, declaring_class_def_index);
395 }
396
397 void VisitArrayAccess(HInstruction* array, HInstruction* index) {
398 GetOrCreateHeapLocation(array, HeapLocation::kInvalidFieldOffset,
399 index, HeapLocation::kDeclaringClassDefIndexForArrays);
400 }
401
402 void VisitInstanceFieldGet(HInstanceFieldGet* instruction) OVERRIDE {
403 VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
404 CreateReferenceInfoForReferenceType(instruction);
405 }
406
407 void VisitInstanceFieldSet(HInstanceFieldSet* instruction) OVERRIDE {
408 HeapLocation* location = VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
409 has_heap_stores_ = true;
410 if (location->GetReferenceInfo()->IsSingleton()) {
411 // A singleton's location value may be killed by loop side effects if it's
412 // defined before that loop, and it's stored into inside that loop.
413 HLoopInformation* loop_info = instruction->GetBlock()->GetLoopInformation();
414 if (loop_info != nullptr) {
415 HInstruction* ref = location->GetReferenceInfo()->GetReference();
416 DCHECK(ref->IsNewInstance());
417 if (loop_info->IsDefinedOutOfTheLoop(ref)) {
418 // ref's location value may be killed by this loop's side effects.
419 location->SetValueKilledByLoopSideEffects(true);
420 } else {
421 // ref is defined inside this loop so this loop's side effects cannot
422 // kill its location value at the loop header since ref/its location doesn't
423 // exist yet at the loop header.
424 }
425 }
426 } else {
427 // For non-singletons, value_killed_by_loop_side_effects_ is inited to
428 // true.
429 DCHECK_EQ(location->IsValueKilledByLoopSideEffects(), true);
430 }
431 }
432
433 void VisitStaticFieldGet(HStaticFieldGet* instruction) OVERRIDE {
434 VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
435 CreateReferenceInfoForReferenceType(instruction);
436 }
437
438 void VisitStaticFieldSet(HStaticFieldSet* instruction) OVERRIDE {
439 VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
440 has_heap_stores_ = true;
441 }
442
443 // We intentionally don't collect HUnresolvedInstanceField/HUnresolvedStaticField accesses
444 // since we cannot accurately track the fields.
445
446 void VisitArrayGet(HArrayGet* instruction) OVERRIDE {
447 VisitArrayAccess(instruction->InputAt(0), instruction->InputAt(1));
448 CreateReferenceInfoForReferenceType(instruction);
449 }
450
451 void VisitArraySet(HArraySet* instruction) OVERRIDE {
452 VisitArrayAccess(instruction->InputAt(0), instruction->InputAt(1));
453 has_heap_stores_ = true;
454 }
455
456 void VisitNewInstance(HNewInstance* new_instance) OVERRIDE {
457 // Any references appearing in the ref_info_array_ so far cannot alias with new_instance.
458 CreateReferenceInfoForReferenceType(new_instance);
459 }
460
461 void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* instruction) OVERRIDE {
462 CreateReferenceInfoForReferenceType(instruction);
463 }
464
465 void VisitInvokeVirtual(HInvokeVirtual* instruction) OVERRIDE {
466 CreateReferenceInfoForReferenceType(instruction);
467 }
468
469 void VisitInvokeInterface(HInvokeInterface* instruction) OVERRIDE {
470 CreateReferenceInfoForReferenceType(instruction);
471 }
472
473 void VisitParameterValue(HParameterValue* instruction) OVERRIDE {
474 CreateReferenceInfoForReferenceType(instruction);
475 }
476
477 void VisitSelect(HSelect* instruction) OVERRIDE {
478 CreateReferenceInfoForReferenceType(instruction);
479 }
480
481 void VisitMonitorOperation(HMonitorOperation* monitor ATTRIBUTE_UNUSED) OVERRIDE {
482 has_monitor_operations_ = true;
483 }
484
485 ArenaVector<ReferenceInfo*> ref_info_array_; // All references used for heap accesses.
486 ArenaVector<HeapLocation*> heap_locations_; // All heap locations.
487 ArenaBitVector aliasing_matrix_; // aliasing info between each pair of locations.
488 bool has_heap_stores_; // If there is no heap stores, LSE acts as GVN with better
489 // alias analysis and won't be as effective.
490 bool has_volatile_; // If there are volatile field accesses.
491 bool has_monitor_operations_; // If there are monitor operations.
492
493 DISALLOW_COPY_AND_ASSIGN(HeapLocationCollector);
494};
495
496class LoadStoreAnalysis : public HOptimization {
497 public:
498 explicit LoadStoreAnalysis(HGraph* graph)
499 : HOptimization(graph, kLoadStoreAnalysisPassName),
500 heap_location_collector_(graph) {}
501
502 const HeapLocationCollector& GetHeapLocationCollector() const {
503 return heap_location_collector_;
504 }
505
506 void Run() OVERRIDE;
507
508 static constexpr const char* kLoadStoreAnalysisPassName = "load_store_analysis";
509
510 private:
511 HeapLocationCollector heap_location_collector_;
512
513 DISALLOW_COPY_AND_ASSIGN(LoadStoreAnalysis);
514};
515
}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_LOAD_STORE_ANALYSIS_H_