buzbee | 2502e00 | 2012-12-31 16:05:53 -0800 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2012 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
Brian Carlstrom | fc0e321 | 2013-07-17 14:40:12 -0700 | [diff] [blame] | 17 | #ifndef ART_COMPILER_DEX_LOCAL_VALUE_NUMBERING_H_ |
| 18 | #define ART_COMPILER_DEX_LOCAL_VALUE_NUMBERING_H_ |
buzbee | 2502e00 | 2012-12-31 16:05:53 -0800 | [diff] [blame] | 19 | |
Ian Rogers | 700a402 | 2014-05-19 16:49:03 -0700 | [diff] [blame] | 20 | #include <memory> |
| 21 | |
buzbee | 2502e00 | 2012-12-31 16:05:53 -0800 | [diff] [blame] | 22 | #include "compiler_internals.h" |
Vladimir Marko | 95a0597 | 2014-05-30 10:01:32 +0100 | [diff] [blame] | 23 | #include "global_value_numbering.h" |
Vladimir Marko | 83cc7ae | 2014-02-12 18:02:05 +0000 | [diff] [blame] | 24 | #include "utils/scoped_arena_allocator.h" |
Vladimir Marko | 69f08ba | 2014-04-11 12:28:11 +0100 | [diff] [blame] | 25 | #include "utils/scoped_arena_containers.h" |
buzbee | 2502e00 | 2012-12-31 16:05:53 -0800 | [diff] [blame] | 26 | |
buzbee | 2502e00 | 2012-12-31 16:05:53 -0800 | [diff] [blame] | 27 | namespace art { |
| 28 | |
class DexFile;  // Forward declaration; full definition not needed in this header.

// Enable/disable tracking values stored in the FILLED_NEW_ARRAY result.
static constexpr bool kLocalValueNumberingEnableFilledNewArrayTracking = true;
buzbee | 2502e00 | 2012-12-31 16:05:53 -0800 | [diff] [blame] | 33 | |
// Value numbering for a single basic block, used as a building block of the global value
// numbering (GVN) pass. Tracks value names for s_regs, fields and array elements, plus
// null-check/range-check state, and supports merging the states of predecessor blocks.
// Value names themselves are assigned by the owning GlobalValueNumbering.
class LocalValueNumbering {
 private:
  // Sentinel "no value" name, shared with the GVN so the two agree on the encoding.
  static constexpr uint16_t kNoValue = GlobalValueNumbering::kNoValue;

 public:
  LocalValueNumbering(GlobalValueNumbering* gvn, BasicBlockId id, ScopedArenaAllocator* allocator);

  // Id of the basic block this LVN describes.
  BasicBlockId Id() const {
    return id_;
  }

  // Full state comparison; implemented in the .cc file.
  bool Equals(const LocalValueNumbering& other) const;

  // Whether the given value name is known to have been null-checked in this block's state.
  bool IsValueNullChecked(uint16_t value_name) const {
    return null_checked_.find(value_name) != null_checked_.end();
  }

  // Whether s_reg currently holds the given value name. If the s_reg has no local
  // mapping, fall back to asking the GVN whether the name is the s_reg's initial value.
  bool IsSregValue(uint16_t s_reg, uint16_t value_name) const {
    auto it = sreg_value_map_.find(s_reg);
    if (it != sreg_value_map_.end()) {
      return it->second == value_name;
    } else {
      return gvn_->HasValue(kNoValue, s_reg, kNoValue, kNoValue, value_name);
    }
  }

  // How predecessor states are combined; kCatchMerge is used for catch-block entry where
  // memory state may have been clobbered by the throwing instruction.
  enum MergeType {
    kNormalMerge,
    kCatchMerge,
    kReturnMerge,  // RETURN or PHI+RETURN. Merge only sreg maps.
  };

  // Merge a single predecessor's state into this (freshly constructed) LVN.
  void MergeOne(const LocalValueNumbering& other, MergeType merge_type);
  void Merge(MergeType merge_type);  // Merge gvn_->merge_lvns_.
  // Set up the initial state for the method's entry block.
  void PrepareEntryBlock();

  // Process one MIR instruction and return the value name of its result.
  uint16_t GetValueNumber(MIR* mir);

  // LocalValueNumbering should be allocated on the ArenaStack (or the native stack).
  static void* operator new(size_t size, ScopedArenaAllocator* allocator) {
    return allocator->Alloc(sizeof(LocalValueNumbering), kArenaAllocMisc);
  }

  // Allow delete-expression to destroy a LocalValueNumbering object without deallocation.
  static void operator delete(void* ptr) { UNUSED(ptr); }

 private:
  // A set of value names.
  typedef GlobalValueNumbering::ValueNameSet ValueNameSet;

  // Field types correspond to the ordering of GET/PUT instructions; this order is the same
  // for IGET, IPUT, SGET, SPUT, AGET and APUT:
  // op         0
  // op_WIDE    1
  // op_OBJECT  2
  // op_BOOLEAN 3
  // op_BYTE    4
  // op_CHAR    5
  // op_SHORT   6
  static constexpr size_t kFieldTypeCount = 7;

  // Key is s_reg, value is value name.
  typedef ScopedArenaSafeMap<uint16_t, uint16_t> SregValueMap;

  // Record a value name for s_reg in the given map. Each s_reg may be set at most once
  // per map (SSA form), hence the DCHECK on a pre-existing entry.
  void SetOperandValueImpl(uint16_t s_reg, uint16_t value, SregValueMap* map) {
    DCHECK_EQ(map->count(s_reg), 0u) << PrettyMethod(gvn_->cu_->method_idx, *gvn_->cu_->dex_file)
        << " LVN id: " << id_ << ", s_reg: " << s_reg;
    map->Put(s_reg, value);
  }

  // Retrieve the value name for s_reg from the given map, falling back to the GVN-assigned
  // initial value for input regs that were never written in this block.
  uint16_t GetOperandValueImpl(int s_reg, const SregValueMap* map) const {
    uint16_t res = kNoValue;
    auto lb = map->find(s_reg);
    if (lb != map->end()) {
      res = lb->second;
    } else {
      // Using the original value; s_reg refers to an input reg.
      res = gvn_->LookupValue(kNoValue, s_reg, kNoValue, kNoValue);
    }
    return res;
  }

  // Narrow (non-wide) setter; the DCHECK guards against mixing wide and narrow uses
  // of the same s_reg.
  void SetOperandValue(uint16_t s_reg, uint16_t value) {
    DCHECK_EQ(sreg_wide_value_map_.count(s_reg), 0u);
    SetOperandValueImpl(s_reg, value, &sreg_value_map_);
  }

  uint16_t GetOperandValue(int s_reg) const {
    DCHECK_EQ(sreg_wide_value_map_.count(s_reg), 0u);
    return GetOperandValueImpl(s_reg, &sreg_value_map_);
  }

  // Wide (64-bit pair) setter; mirror image of SetOperandValue().
  void SetOperandValueWide(uint16_t s_reg, uint16_t value) {
    DCHECK_EQ(sreg_value_map_.count(s_reg), 0u);
    SetOperandValueImpl(s_reg, value, &sreg_wide_value_map_);
  }

  uint16_t GetOperandValueWide(int s_reg) const {
    DCHECK_EQ(sreg_value_map_.count(s_reg), 0u);
    return GetOperandValueImpl(s_reg, &sreg_wide_value_map_);
  }

  // Key identifying one (array, index) range check; both components are value names.
  struct RangeCheckKey {
    uint16_t array;
    uint16_t index;

    // NOTE: Can't define this at namespace scope for a private struct.
    bool operator==(const RangeCheckKey& other) const {
      return array == other.array && index == other.index;
    }
  };

  // Strict weak ordering for RangeCheckKey: by array, then by index.
  struct RangeCheckKeyComparator {
    bool operator()(const RangeCheckKey& lhs, const RangeCheckKey& rhs) const {
      if (lhs.array != rhs.array) {
        return lhs.array < rhs.array;
      }
      return lhs.index < rhs.index;
    }
  };

  typedef ScopedArenaSet<RangeCheckKey, RangeCheckKeyComparator> RangeCheckSet;

  // Maps instance field "location" (derived from base, field_id and type) to value name.
  typedef ScopedArenaSafeMap<uint16_t, uint16_t> IFieldLocToValueMap;

  // Maps static field id to value name.
  typedef ScopedArenaSafeMap<uint16_t, uint16_t> SFieldToValueMap;

  // Key describing an instance-field store through an escaped reference that invalidates
  // cached non-aliasing values.
  struct EscapedIFieldClobberKey {
    uint16_t base;      // Or array.
    uint16_t type;
    uint16_t field_id;  // None (kNoValue) for arrays and unresolved instance field stores.

    // NOTE: Can't define this at namespace scope for a private struct.
    bool operator==(const EscapedIFieldClobberKey& other) const {
      return base == other.base && type == other.type && field_id == other.field_id;
    }
  };

  struct EscapedIFieldClobberKeyComparator {
    bool operator()(const EscapedIFieldClobberKey& lhs, const EscapedIFieldClobberKey& rhs) const {
      // Compare base first. This makes sequential iteration respect the order of base.
      if (lhs.base != rhs.base) {
        return lhs.base < rhs.base;
      }
      // Compare type second. This makes the type-clobber entries (field_id == kNoValue) last
      // for given base and type and makes it easy to prune unnecessary entries when merging
      // escaped_ifield_clobber_set_ from multiple LVNs.
      if (lhs.type != rhs.type) {
        return lhs.type < rhs.type;
      }
      return lhs.field_id < rhs.field_id;
    }
  };

  typedef ScopedArenaSet<EscapedIFieldClobberKey, EscapedIFieldClobberKeyComparator>
      EscapedIFieldClobberSet;

  // Key describing an array store through an escaped reference; analogous to
  // EscapedIFieldClobberKey but arrays have no field id.
  struct EscapedArrayClobberKey {
    uint16_t base;
    uint16_t type;

    // NOTE: Can't define this at namespace scope for a private struct.
    bool operator==(const EscapedArrayClobberKey& other) const {
      return base == other.base && type == other.type;
    }
  };

  struct EscapedArrayClobberKeyComparator {
    bool operator()(const EscapedArrayClobberKey& lhs, const EscapedArrayClobberKey& rhs) const {
      // Compare base first. This makes sequential iteration respect the order of base.
      if (lhs.base != rhs.base) {
        return lhs.base < rhs.base;
      }
      return lhs.type < rhs.type;
    }
  };

  // Clobber set for previously non-aliasing array refs that escaped.
  typedef ScopedArenaSet<EscapedArrayClobberKey, EscapedArrayClobberKeyComparator>
      EscapedArrayClobberSet;

  // Known location values for an aliasing set. The set can be tied to one of:
  //   1. Instance field. The locations are aliasing references used to access the field.
  //   2. Non-aliasing array reference. The locations are indexes to the array.
  //   3. Aliasing array type. The locations are (reference, index) pair ids assigned by GVN.
  // In each case we keep track of the last stored value, if any, and the set of locations
  // where it was stored. We also keep track of all values known for the current write state
  // (load_value_map), which can be known either because they have been loaded since the last
  // store or because they contained the last_stored_value before the store and thus could not
  // have changed as a result.
  struct AliasingValues {
    // The containers borrow the allocator of the owning LVN's null_checked_ set so that
    // everything lives on the same scoped arena.
    explicit AliasingValues(LocalValueNumbering* lvn)
        : memory_version_before_stores(kNoValue),
          last_stored_value(kNoValue),
          store_loc_set(std::less<uint16_t>(), lvn->null_checked_.get_allocator()),
          last_load_memory_version(kNoValue),
          load_value_map(std::less<uint16_t>(), lvn->null_checked_.get_allocator()) {
    }

    uint16_t memory_version_before_stores;  // kNoValue if start version for the field.
    uint16_t last_stored_value;             // Last stored value name, kNoValue if none.
    ValueNameSet store_loc_set;             // Where was last_stored_value stored.

    // Maps refs (other than stored_to) to currently known values for this field other. On write,
    // anything that differs from the written value is removed as it may be overwritten.
    uint16_t last_load_memory_version;    // kNoValue if not known.
    ScopedArenaSafeMap<uint16_t, uint16_t> load_value_map;

    // NOTE: Can't define this at namespace scope for a private struct.
    bool operator==(const AliasingValues& other) const {
      return memory_version_before_stores == other.memory_version_before_stores &&
          last_load_memory_version == other.last_load_memory_version &&
          last_stored_value == other.last_stored_value &&
          store_loc_set == other.store_loc_set &&
          load_value_map == other.load_value_map;
    }
  };

  // Maps instance field id to AliasingValues, locations are object refs.
  typedef ScopedArenaSafeMap<uint16_t, AliasingValues> AliasingIFieldValuesMap;

  // Maps non-aliasing array reference to AliasingValues, locations are array indexes.
  typedef ScopedArenaSafeMap<uint16_t, AliasingValues> NonAliasingArrayValuesMap;

  // Maps aliasing array type to AliasingValues, locations are (array, index) pair ids.
  typedef ScopedArenaSafeMap<uint16_t, AliasingValues> AliasingArrayValuesMap;

  // Helper classes defining versions for updating and merging the AliasingValues maps above.
  // Defined in the .cc file; used as the `Versions` policy parameter of the templates below.
  class AliasingIFieldVersions;
  class NonAliasingArrayVersions;
  class AliasingArrayVersions;

  // Find-or-create the AliasingValues entry for `key` in `map`.
  template <typename Map>
  AliasingValues* GetAliasingValues(Map* map, const typename Map::key_type& key);

  template <typename Versions, typename KeyType>
  void UpdateAliasingValuesLoadVersion(const KeyType& key, AliasingValues* values);

  // Static variant used during merges: resolve the value for `location` in `lvn`'s map.
  template <typename Versions, typename Map>
  static uint16_t AliasingValuesMergeGet(GlobalValueNumbering* gvn,
                                         const LocalValueNumbering* lvn,
                                         Map* map, const typename Map::key_type& key,
                                         uint16_t location);

  template <typename Versions, typename Map>
  uint16_t HandleAliasingValuesGet(Map* map, const typename Map::key_type& key,
                                   uint16_t location);

  template <typename Versions, typename Map>
  bool HandleAliasingValuesPut(Map* map, const typename Map::key_type& key,
                               uint16_t location, uint16_t value);

  // Deep-copy a map of AliasingValues (the values hold arena containers, so a plain
  // container copy is not sufficient).
  template <typename K>
  void CopyAliasingValuesMap(ScopedArenaSafeMap<K, AliasingValues>* dest,
                             const ScopedArenaSafeMap<K, AliasingValues>& src);

  // Per-opcode handlers invoked from GetValueNumber(); see the .cc file for semantics.
  uint16_t MarkNonAliasingNonNull(MIR* mir);
  bool IsNonAliasing(uint16_t reg) const;
  bool IsNonAliasingIField(uint16_t reg, uint16_t field_id, uint16_t type) const;
  bool IsNonAliasingArray(uint16_t reg, uint16_t type) const;
  void HandleNullCheck(MIR* mir, uint16_t reg);
  void HandleRangeCheck(MIR* mir, uint16_t array, uint16_t index);
  void HandlePutObject(MIR* mir);
  void HandleEscapingRef(uint16_t base);
  void HandleInvokeArgs(const MIR* mir, const LocalValueNumbering* mir_lvn);
  uint16_t HandlePhi(MIR* mir);
  uint16_t HandleAGet(MIR* mir, uint16_t opcode);
  void HandleAPut(MIR* mir, uint16_t opcode);
  uint16_t HandleIGet(MIR* mir, uint16_t opcode);
  void HandleIPut(MIR* mir, uint16_t opcode);
  uint16_t HandleSGet(MIR* mir, uint16_t opcode);
  void HandleSPut(MIR* mir, uint16_t opcode);
  void RemoveSFieldsForType(uint16_t type);
  void HandleInvokeOrClInitOrAcquireOp(MIR* mir);

  bool SameMemoryVersion(const LocalValueNumbering& other) const;

  uint16_t NewMemoryVersion(uint16_t* new_version);
  void MergeMemoryVersions(bool clobbered_catch);

  void PruneNonAliasingRefsForCatch();

  // Intersect the named set member across all LVNs being merged.
  template <typename Set, Set LocalValueNumbering::* set_ptr>
  void IntersectSets();

  void CopyLiveSregValues(SregValueMap* dest, const SregValueMap& src);

  // Intersect maps as sets. The value type must be equality-comparable.
  template <SregValueMap LocalValueNumbering::* map_ptr>
  void IntersectSregValueMaps();

  // Intersect maps as sets. The value type must be equality-comparable.
  template <typename Map>
  static void InPlaceIntersectMaps(Map* work_map, const Map& other_map);

  // Merge the named set member across merge_lvns_, calling MergeFn per entry with an
  // insertion hint.
  template <typename Set, Set LocalValueNumbering::*set_ptr, void (LocalValueNumbering::*MergeFn)(
      const typename Set::value_type& entry, typename Set::iterator hint)>
  void MergeSets();

  void IntersectAliasingValueLocations(AliasingValues* work_values, const AliasingValues* values);

  // Per-entry merge callbacks used with MergeSets().
  void MergeEscapedRefs(const ValueNameSet::value_type& entry, ValueNameSet::iterator hint);
  void MergeEscapedIFieldTypeClobberSets(const EscapedIFieldClobberSet::value_type& entry,
                                         EscapedIFieldClobberSet::iterator hint);
  void MergeEscapedIFieldClobberSets(const EscapedIFieldClobberSet::value_type& entry,
                                     EscapedIFieldClobberSet::iterator hint);
  void MergeEscapedArrayClobberSets(const EscapedArrayClobberSet::value_type& entry,
                                    EscapedArrayClobberSet::iterator hint);
  void MergeSFieldValues(const SFieldToValueMap::value_type& entry,
                         SFieldToValueMap::iterator hint);
  void MergeNonAliasingIFieldValues(const IFieldLocToValueMap::value_type& entry,
                                    IFieldLocToValueMap::iterator hint);
  void MergeNullChecked();

  template <typename Map, Map LocalValueNumbering::*map_ptr, typename Versions>
  void MergeAliasingValues(const typename Map::value_type& entry, typename Map::iterator hint);

  // Owning GVN; non-null for the lifetime of this object (set in the constructor).
  GlobalValueNumbering* gvn_;

  // We're using the block id as a 16-bit operand value for some lookups.
  static_assert(sizeof(BasicBlockId) == sizeof(uint16_t), "BasicBlockId must be 16 bit");
  BasicBlockId id_;

  // Narrow and wide s_reg -> value-name maps; an s_reg appears in at most one of them.
  SregValueMap sreg_value_map_;
  SregValueMap sreg_wide_value_map_;

  // Field/array value tracking, keyed as described by the typedefs above.
  SFieldToValueMap sfield_value_map_;
  IFieldLocToValueMap non_aliasing_ifield_value_map_;
  AliasingIFieldValuesMap aliasing_ifield_value_map_;
  NonAliasingArrayValuesMap non_aliasing_array_value_map_;
  AliasingArrayValuesMap aliasing_array_value_map_;

  // Data for dealing with memory clobbering and store/load aliasing.
  uint16_t global_memory_version_;
  uint16_t unresolved_sfield_version_[kFieldTypeCount];
  uint16_t unresolved_ifield_version_[kFieldTypeCount];
  // Value names of references to objects that cannot be reached through a different value name.
  ValueNameSet non_aliasing_refs_;
  // Previously non-aliasing refs that escaped but can still be used for non-aliasing AGET/IGET.
  ValueNameSet escaped_refs_;
  // Blacklists for cases where escaped_refs_ can't be used.
  EscapedIFieldClobberSet escaped_ifield_clobber_set_;
  EscapedArrayClobberSet escaped_array_clobber_set_;

  // Range check and null check elimination.
  RangeCheckSet range_checked_;
  ValueNameSet null_checked_;

  // Reuse one vector for all merges to avoid leaking too much memory on the ArenaStack.
  ScopedArenaVector<BasicBlockId> merge_names_;
  // Map to identify when different locations merge the same values.
  ScopedArenaSafeMap<ScopedArenaVector<BasicBlockId>, uint16_t> merge_map_;
  // New memory version for merge, kNoValue if all memory versions matched.
  uint16_t merge_new_memory_version_;

  DISALLOW_COPY_AND_ASSIGN(LocalValueNumbering);
};
| 393 | |
Brian Carlstrom | 7934ac2 | 2013-07-26 10:54:15 -0700 | [diff] [blame] | 394 | } // namespace art |
buzbee | 2502e00 | 2012-12-31 16:05:53 -0800 | [diff] [blame] | 395 | |
Brian Carlstrom | fc0e321 | 2013-07-17 14:40:12 -0700 | [diff] [blame] | 396 | #endif // ART_COMPILER_DEX_LOCAL_VALUE_NUMBERING_H_ |