/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_DEX_LOCAL_VALUE_NUMBERING_H_
#define ART_COMPILER_DEX_LOCAL_VALUE_NUMBERING_H_

#include <memory>

#include "compiler_internals.h"
#include "global_value_numbering.h"
#include "utils/arena_object.h"

namespace art {

class DexFile;

// Enable/disable tracking values stored in the FILLED_NEW_ARRAY result.
static constexpr bool kLocalValueNumberingEnableFilledNewArrayTracking = true;

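// Value numbering for a single basic block. Typical use, as an illustrative sketch rather than
// a contract: the owning GVN pass constructs one LocalValueNumbering per basic block, merges the
// LVNs of the block's predecessors via Merge()/MergeOne() (or calls PrepareEntryBlock() for the
// entry block), and then calls GetValueNumber() for each MIR in the block in order.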
class LocalValueNumbering : public DeletableArenaObject<kArenaAllocMisc> {
 private:
  static constexpr uint16_t kNoValue = GlobalValueNumbering::kNoValue;

 public:
  LocalValueNumbering(GlobalValueNumbering* gvn, BasicBlockId id, ScopedArenaAllocator* allocator);

  BasicBlockId Id() const {
    return id_;
  }

  bool Equals(const LocalValueNumbering& other) const;

  bool IsValueNullChecked(uint16_t value_name) const {
    return null_checked_.find(value_name) != null_checked_.end();
  }

  bool IsSregValue(uint16_t s_reg, uint16_t value_name) const {
    auto it = sreg_value_map_.find(s_reg);
    if (it != sreg_value_map_.end()) {
      return it->second == value_name;
    } else {
      return gvn_->HasValue(kNoValue, s_reg, kNoValue, kNoValue, value_name);
    }
  }

  enum MergeType {
    kNormalMerge,
    kCatchMerge,
    kReturnMerge,  // RETURN or PHI+RETURN. Merge only sreg maps.
  };

  void MergeOne(const LocalValueNumbering& other, MergeType merge_type);
  void Merge(MergeType merge_type);  // Merge gvn_->merge_lvns_.
  void PrepareEntryBlock();

  uint16_t GetValueNumber(MIR* mir);

 private:
  // A set of value names.
  typedef GlobalValueNumbering::ValueNameSet ValueNameSet;

  // Field types correspond to the ordering of GET/PUT instructions; this order is the same
  // for IGET, IPUT, SGET, SPUT, AGET and APUT:
  //   op         0
  //   op_WIDE    1
  //   op_OBJECT  2
  //   op_BOOLEAN 3
  //   op_BYTE    4
  //   op_CHAR    5
  //   op_SHORT   6
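  // E.g. IGET_WIDE and SGET_WIDE use type index 1, APUT_OBJECT uses type index 2.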
  static constexpr size_t kFieldTypeCount = 7;

  // Key is s_reg, value is value name.
  typedef ScopedArenaSafeMap<uint16_t, uint16_t> SregValueMap;

  void SetOperandValueImpl(uint16_t s_reg, uint16_t value, SregValueMap* map) {
    DCHECK_EQ(map->count(s_reg), 0u) << PrettyMethod(gvn_->cu_->method_idx, *gvn_->cu_->dex_file)
        << " LVN id: " << id_ << ", s_reg: " << s_reg;
    map->Put(s_reg, value);
  }

  uint16_t GetOperandValueImpl(int s_reg, const SregValueMap* map) const {
    uint16_t res = kNoValue;
    auto lb = map->find(s_reg);
    if (lb != map->end()) {
      res = lb->second;
    } else {
      // Using the original value; s_reg refers to an input reg.
      res = gvn_->LookupValue(kNoValue, s_reg, kNoValue, kNoValue);
    }
    return res;
  }

  void SetOperandValue(uint16_t s_reg, uint16_t value) {
    DCHECK_EQ(sreg_wide_value_map_.count(s_reg), 0u);
    SetOperandValueImpl(s_reg, value, &sreg_value_map_);
  }

  uint16_t GetOperandValue(int s_reg) const {
    DCHECK_EQ(sreg_wide_value_map_.count(s_reg), 0u);
    return GetOperandValueImpl(s_reg, &sreg_value_map_);
  }

  void SetOperandValueWide(uint16_t s_reg, uint16_t value) {
    DCHECK_EQ(sreg_value_map_.count(s_reg), 0u);
    SetOperandValueImpl(s_reg, value, &sreg_wide_value_map_);
  }

  uint16_t GetOperandValueWide(int s_reg) const {
    DCHECK_EQ(sreg_value_map_.count(s_reg), 0u);
    return GetOperandValueImpl(s_reg, &sreg_wide_value_map_);
  }

  struct RangeCheckKey {
    uint16_t array;
    uint16_t index;

    // NOTE: Can't define this at namespace scope for a private struct.
    bool operator==(const RangeCheckKey& other) const {
      return array == other.array && index == other.index;
    }
  };

  struct RangeCheckKeyComparator {
    bool operator()(const RangeCheckKey& lhs, const RangeCheckKey& rhs) const {
      if (lhs.array != rhs.array) {
        return lhs.array < rhs.array;
      }
      return lhs.index < rhs.index;
    }
  };

  typedef ScopedArenaSet<RangeCheckKey, RangeCheckKeyComparator> RangeCheckSet;

  // Maps instance field "location" (derived from base, field_id and type) to value name.
  typedef ScopedArenaSafeMap<uint16_t, uint16_t> IFieldLocToValueMap;

  // Maps static field id to value name.
  typedef ScopedArenaSafeMap<uint16_t, uint16_t> SFieldToValueMap;

  struct EscapedIFieldClobberKey {
    uint16_t base;      // Or array.
    uint16_t type;
    uint16_t field_id;  // None (kNoValue) for arrays and unresolved instance field stores.

    // NOTE: Can't define this at namespace scope for a private struct.
    bool operator==(const EscapedIFieldClobberKey& other) const {
      return base == other.base && type == other.type && field_id == other.field_id;
    }
  };

  struct EscapedIFieldClobberKeyComparator {
    bool operator()(const EscapedIFieldClobberKey& lhs, const EscapedIFieldClobberKey& rhs) const {
      // Compare base first. This makes sequential iteration respect the order of base.
      if (lhs.base != rhs.base) {
        return lhs.base < rhs.base;
      }
      // Compare type second. This makes the type-clobber entries (field_id == kNoValue) last
      // for given base and type and makes it easy to prune unnecessary entries when merging
      // escaped_ifield_clobber_set_ from multiple LVNs.
      if (lhs.type != rhs.type) {
        return lhs.type < rhs.type;
      }
      return lhs.field_id < rhs.field_id;
    }
  };

  typedef ScopedArenaSet<EscapedIFieldClobberKey, EscapedIFieldClobberKeyComparator>
      EscapedIFieldClobberSet;

  struct EscapedArrayClobberKey {
    uint16_t base;
    uint16_t type;

    // NOTE: Can't define this at namespace scope for a private struct.
    bool operator==(const EscapedArrayClobberKey& other) const {
      return base == other.base && type == other.type;
    }
  };

  struct EscapedArrayClobberKeyComparator {
    bool operator()(const EscapedArrayClobberKey& lhs, const EscapedArrayClobberKey& rhs) const {
      // Compare base first. This makes sequential iteration respect the order of base.
      if (lhs.base != rhs.base) {
        return lhs.base < rhs.base;
      }
      return lhs.type < rhs.type;
    }
  };

  // Clobber set for previously non-aliasing array refs that escaped.
  typedef ScopedArenaSet<EscapedArrayClobberKey, EscapedArrayClobberKeyComparator>
      EscapedArrayClobberSet;

  // Known location values for an aliasing set. The set can be tied to one of:
  //   1. Instance field. The locations are aliasing references used to access the field.
  //   2. Non-aliasing array reference. The locations are indexes to the array.
  //   3. Aliasing array type. The locations are (reference, index) pair ids assigned by GVN.
  // In each case we keep track of the last stored value, if any, and the set of locations
  // where it was stored. We also keep track of all values known for the current write state
  // (load_value_map), which can be known either because they have been loaded since the last
  // store or because they contained the last_stored_value before the store and thus could not
  // have changed as a result.
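  // Illustrative sketch of these rules (not part of the implementation): for an aliasing
  // instance field f, a store "v0.f = A" sets last_stored_value = A and store_loc_set = { v0 };
  // a later load "x = v1.f" records load_value_map[v1] = x; a subsequent store "v2.f = B" with
  // B != A makes B the last_stored_value with store_loc_set = { v2 } and drops load_value_map
  // entries whose value differs from B, since those locations may have been overwritten.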
  struct AliasingValues {
    explicit AliasingValues(LocalValueNumbering* lvn)
        : memory_version_before_stores(kNoValue),
          last_stored_value(kNoValue),
          store_loc_set(std::less<uint16_t>(), lvn->null_checked_.get_allocator()),
          last_load_memory_version(kNoValue),
          load_value_map(std::less<uint16_t>(), lvn->null_checked_.get_allocator()) {
    }

    uint16_t memory_version_before_stores;  // kNoValue if start version for the field.
    uint16_t last_stored_value;             // Last stored value name, kNoValue if none.
    ValueNameSet store_loc_set;             // Where was last_stored_value stored.

    // Maps refs (other than those in store_loc_set) to currently known values for this field.
    // On write, anything that differs from the written value is removed as it may be overwritten.
    uint16_t last_load_memory_version;      // kNoValue if not known.
    ScopedArenaSafeMap<uint16_t, uint16_t> load_value_map;

    // NOTE: Can't define this at namespace scope for a private struct.
    bool operator==(const AliasingValues& other) const {
      return memory_version_before_stores == other.memory_version_before_stores &&
          last_load_memory_version == other.last_load_memory_version &&
          last_stored_value == other.last_stored_value &&
          store_loc_set == other.store_loc_set &&
          load_value_map == other.load_value_map;
    }
  };

  // Maps instance field id to AliasingValues, locations are object refs.
  typedef ScopedArenaSafeMap<uint16_t, AliasingValues> AliasingIFieldValuesMap;

  // Maps non-aliasing array reference to AliasingValues, locations are array indexes.
  typedef ScopedArenaSafeMap<uint16_t, AliasingValues> NonAliasingArrayValuesMap;

  // Maps aliasing array type to AliasingValues, locations are (array, index) pair ids.
  typedef ScopedArenaSafeMap<uint16_t, AliasingValues> AliasingArrayValuesMap;

  // Helper classes defining versions for updating and merging the AliasingValues maps above.
  class AliasingIFieldVersions;
  class NonAliasingArrayVersions;
  class AliasingArrayVersions;

  template <typename Map>
  AliasingValues* GetAliasingValues(Map* map, const typename Map::key_type& key);

  template <typename Versions, typename KeyType>
  void UpdateAliasingValuesLoadVersion(const KeyType& key, AliasingValues* values);

  template <typename Versions, typename Map>
  static uint16_t AliasingValuesMergeGet(GlobalValueNumbering* gvn,
                                         const LocalValueNumbering* lvn,
                                         Map* map, const typename Map::key_type& key,
                                         uint16_t location);

  template <typename Versions, typename Map>
  uint16_t HandleAliasingValuesGet(Map* map, const typename Map::key_type& key,
                                   uint16_t location);

  template <typename Versions, typename Map>
  bool HandleAliasingValuesPut(Map* map, const typename Map::key_type& key,
                               uint16_t location, uint16_t value);

  template <typename K>
  void CopyAliasingValuesMap(ScopedArenaSafeMap<K, AliasingValues>* dest,
                             const ScopedArenaSafeMap<K, AliasingValues>& src);

  uint16_t MarkNonAliasingNonNull(MIR* mir);
  bool IsNonAliasing(uint16_t reg) const;
  bool IsNonAliasingIField(uint16_t reg, uint16_t field_id, uint16_t type) const;
  bool IsNonAliasingArray(uint16_t reg, uint16_t type) const;
  void HandleNullCheck(MIR* mir, uint16_t reg);
  void HandleRangeCheck(MIR* mir, uint16_t array, uint16_t index);
  void HandlePutObject(MIR* mir);
  void HandleEscapingRef(uint16_t base);
  void HandleInvokeArgs(const MIR* mir, const LocalValueNumbering* mir_lvn);
  uint16_t HandlePhi(MIR* mir);
  uint16_t HandleAGet(MIR* mir, uint16_t opcode);
  void HandleAPut(MIR* mir, uint16_t opcode);
  uint16_t HandleIGet(MIR* mir, uint16_t opcode);
  void HandleIPut(MIR* mir, uint16_t opcode);
  uint16_t HandleSGet(MIR* mir, uint16_t opcode);
  void HandleSPut(MIR* mir, uint16_t opcode);
  void RemoveSFieldsForType(uint16_t type);
  void HandleInvokeOrClInitOrAcquireOp(MIR* mir);

  bool SameMemoryVersion(const LocalValueNumbering& other) const;

  uint16_t NewMemoryVersion(uint16_t* new_version);
  void MergeMemoryVersions(bool clobbered_catch);

  void PruneNonAliasingRefsForCatch();

  template <typename Set, Set LocalValueNumbering::* set_ptr>
  void IntersectSets();

  void CopyLiveSregValues(SregValueMap* dest, const SregValueMap& src);

  // Intersect maps as sets. The value type must be equality-comparable.
  template <SregValueMap LocalValueNumbering::* map_ptr>
  void IntersectSregValueMaps();

  // Intersect maps as sets. The value type must be equality-comparable.
  template <typename Map>
  static void InPlaceIntersectMaps(Map* work_map, const Map& other_map);

  template <typename Set, Set LocalValueNumbering::*set_ptr, void (LocalValueNumbering::*MergeFn)(
      const typename Set::value_type& entry, typename Set::iterator hint)>
  void MergeSets();

  void IntersectAliasingValueLocations(AliasingValues* work_values, const AliasingValues* values);

  void MergeEscapedRefs(const ValueNameSet::value_type& entry, ValueNameSet::iterator hint);
  void MergeEscapedIFieldTypeClobberSets(const EscapedIFieldClobberSet::value_type& entry,
                                         EscapedIFieldClobberSet::iterator hint);
  void MergeEscapedIFieldClobberSets(const EscapedIFieldClobberSet::value_type& entry,
                                     EscapedIFieldClobberSet::iterator hint);
  void MergeEscapedArrayClobberSets(const EscapedArrayClobberSet::value_type& entry,
                                    EscapedArrayClobberSet::iterator hint);
  void MergeSFieldValues(const SFieldToValueMap::value_type& entry,
                         SFieldToValueMap::iterator hint);
  void MergeNonAliasingIFieldValues(const IFieldLocToValueMap::value_type& entry,
                                    IFieldLocToValueMap::iterator hint);
  void MergeNullChecked();

  template <typename Map, Map LocalValueNumbering::*map_ptr, typename Versions>
  void MergeAliasingValues(const typename Map::value_type& entry, typename Map::iterator hint);

  GlobalValueNumbering* gvn_;

  // We're using the block id as a 16-bit operand value for some lookups.
  COMPILE_ASSERT(sizeof(BasicBlockId) == sizeof(uint16_t), BasicBlockId_must_be_16_bit);
  BasicBlockId id_;

  SregValueMap sreg_value_map_;
  SregValueMap sreg_wide_value_map_;

  SFieldToValueMap sfield_value_map_;
  IFieldLocToValueMap non_aliasing_ifield_value_map_;
  AliasingIFieldValuesMap aliasing_ifield_value_map_;
  NonAliasingArrayValuesMap non_aliasing_array_value_map_;
  AliasingArrayValuesMap aliasing_array_value_map_;

  // Data for dealing with memory clobbering and store/load aliasing.
  uint16_t global_memory_version_;
  uint16_t unresolved_sfield_version_[kFieldTypeCount];
  uint16_t unresolved_ifield_version_[kFieldTypeCount];
  // Value names of references to objects that cannot be reached through a different value name.
  ValueNameSet non_aliasing_refs_;
  // Previously non-aliasing refs that escaped but can still be used for non-aliasing AGET/IGET.
  ValueNameSet escaped_refs_;
  // Blacklists for cases where escaped_refs_ can't be used.
  EscapedIFieldClobberSet escaped_ifield_clobber_set_;
  EscapedArrayClobberSet escaped_array_clobber_set_;
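  // Illustrative lifecycle, as suggested by the handlers above: a freshly allocated reference
  // (e.g. a NEW_INSTANCE result) starts in non_aliasing_refs_; once it is stored to the heap or
  // passed to an invoke (see HandlePutObject(), HandleInvokeArgs() and HandleEscapingRef()) it
  // moves to escaped_refs_, and stores that might clobber its known values are recorded in the
  // clobber sets above.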

  // Range check and null check elimination.
  RangeCheckSet range_checked_;
  ValueNameSet null_checked_;

  // Reuse one vector for all merges to avoid leaking too much memory on the ArenaStack.
  ScopedArenaVector<BasicBlockId> merge_names_;
  // Map to identify when different locations merge the same values.
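  // (Illustration of the comment above: two locations that merge the same combination of
  // incoming value names receive the same merged value name.)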
  ScopedArenaSafeMap<ScopedArenaVector<BasicBlockId>, uint16_t> merge_map_;
  // New memory version for merge, kNoValue if all memory versions matched.
  uint16_t merge_new_memory_version_;

  DISALLOW_COPY_AND_ASSIGN(LocalValueNumbering);
};

}  // namespace art

#endif  // ART_COMPILER_DEX_LOCAL_VALUE_NUMBERING_H_