Ian Rogers | 39ebcb8 | 2013-05-30 16:57:23 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2013 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
Brian Carlstrom | fc0e321 | 2013-07-17 14:40:12 -0700 | [diff] [blame] | 17 | #ifndef ART_RUNTIME_MIRROR_DEX_CACHE_INL_H_ |
| 18 | #define ART_RUNTIME_MIRROR_DEX_CACHE_INL_H_ |
Ian Rogers | 39ebcb8 | 2013-05-30 16:57:23 -0700 | [diff] [blame] | 19 | |
| 20 | #include "dex_cache.h" |
| 21 | |
Andreas Gampe | 5794381 | 2017-12-06 21:39:13 -0800 | [diff] [blame] | 22 | #include <android-base/logging.h> |
| 23 | |
Andreas Gampe | a1d2f95 | 2017-04-20 22:53:58 -0700 | [diff] [blame] | 24 | #include "art_field.h" |
Andreas Gampe | c6ea7d0 | 2017-02-01 16:46:28 -0800 | [diff] [blame] | 25 | #include "art_method.h" |
Vladimir Marko | 05792b9 | 2015-08-03 11:56:49 +0100 | [diff] [blame] | 26 | #include "base/casts.h" |
Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 27 | #include "base/enums.h" |
Andreas Gampe | a1d2f95 | 2017-04-20 22:53:58 -0700 | [diff] [blame] | 28 | #include "class_linker.h" |
David Sehr | 9e734c7 | 2018-01-04 17:56:19 -0800 | [diff] [blame] | 29 | #include "dex/dex_file.h" |
Vladimir Marko | 317892b | 2018-05-31 11:11:32 +0100 | [diff] [blame] | 30 | #include "gc_root-inl.h" |
Orion Hodson | c069a30 | 2017-01-18 09:23:12 +0000 | [diff] [blame] | 31 | #include "mirror/call_site.h" |
Andreas Gampe | 8cf9cb3 | 2017-07-19 09:28:38 -0700 | [diff] [blame] | 32 | #include "mirror/class.h" |
Narayan Kamath | 25352fc | 2016-08-03 12:46:58 +0100 | [diff] [blame] | 33 | #include "mirror/method_type.h" |
Mathieu Chartier | 31e8822 | 2016-10-14 18:43:19 -0700 | [diff] [blame] | 34 | #include "obj_ptr.h" |
Andreas Gampe | 88dbad3 | 2018-06-26 19:54:12 -0700 | [diff] [blame] | 35 | #include "object-inl.h" |
Andreas Gampe | 8cf9cb3 | 2017-07-19 09:28:38 -0700 | [diff] [blame] | 36 | #include "runtime.h" |
Mathieu Chartier | 88ea61e | 2018-06-20 17:45:41 -0700 | [diff] [blame] | 37 | #include "write_barrier-inl.h" |
Mathieu Chartier | bc56fc3 | 2014-06-03 15:37:03 -0700 | [diff] [blame] | 38 | |
Christina Wadsworth | bf44e0e | 2016-08-18 10:37:42 -0700 | [diff] [blame] | 39 | #include <atomic> |
| 40 | |
Ian Rogers | 39ebcb8 | 2013-05-30 16:57:23 -0700 | [diff] [blame] | 41 | namespace art { |
| 42 | namespace mirror { |
| 43 | |
// Constructs a pair associating |object| with the dex file index |index|.
// The pair is stored into a cache slot as one atomic unit, so both fields
// are set together here.
template <typename T>
inline DexCachePair<T>::DexCachePair(ObjPtr<T> object, uint32_t index)
    : object(object), index(index) {}
| 47 | |
// Writes an empty sentinel pair into slot 0 of |dex_cache|.
// Slot 0 needs an explicit invalid index: index 0 hashes to slot 0, so a
// zero-initialized pair would otherwise look like a resolved entry for
// index 0. Uses a relaxed store; presumably the array is not yet visible
// to other threads at this point — confirm at call sites.
template <typename T>
inline void DexCachePair<T>::Initialize(std::atomic<DexCachePair<T>>* dex_cache) {
  DexCachePair<T> first_elem;
  first_elem.object = GcRoot<T>(nullptr);
  first_elem.index = InvalidIndexForSlot(0);
  dex_cache[0].store(first_elem, std::memory_order_relaxed);
}
| 55 | |
| 56 | template <typename T> |
| 57 | inline T* DexCachePair<T>::GetObjectForIndex(uint32_t idx) { |
| 58 | if (idx != index) { |
| 59 | return nullptr; |
| 60 | } |
| 61 | DCHECK(!object.IsNull()); |
| 62 | return object.Read(); |
| 63 | } |
| 64 | |
// Writes an empty sentinel pair into slot 0 of the native pair array
// |dex_cache|, mirroring DexCachePair<T>::Initialize() for native
// (non-GC-managed) entries. |pointer_size| selects the 32-bit or 64-bit
// in-memory pair layout used by SetNativePairPtrSize().
template <typename T>
inline void NativeDexCachePair<T>::Initialize(std::atomic<NativeDexCachePair<T>>* dex_cache,
                                              PointerSize pointer_size) {
  NativeDexCachePair<T> first_elem;
  first_elem.object = nullptr;
  first_elem.index = InvalidIndexForSlot(0);
  DexCache::SetNativePairPtrSize(dex_cache, 0, first_elem, pointer_size);
}
| 73 | |
Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 74 | inline uint32_t DexCache::ClassSize(PointerSize pointer_size) { |
Narayan Kamath | 6b2dc31 | 2017-03-14 13:26:12 +0000 | [diff] [blame] | 75 | const uint32_t vtable_entries = Object::kVTableLength; |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 76 | return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0, pointer_size); |
Ian Rogers | 39ebcb8 | 2013-05-30 16:57:23 -0700 | [diff] [blame] | 77 | } |
| 78 | |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 79 | inline uint32_t DexCache::StringSlotIndex(dex::StringIndex string_idx) { |
Andreas Gampe | 8a0128a | 2016-11-28 07:38:35 -0800 | [diff] [blame] | 80 | DCHECK_LT(string_idx.index_, GetDexFile()->NumStringIds()); |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 81 | const uint32_t slot_idx = string_idx.index_ % kDexCacheStringCacheSize; |
| 82 | DCHECK_LT(slot_idx, NumStrings()); |
| 83 | return slot_idx; |
Andreas Gampe | aa910d5 | 2014-07-30 18:59:05 -0700 | [diff] [blame] | 84 | } |
| 85 | |
// Returns the resolved String for |string_idx|, or null if not cached.
// Lookup is two-tier: when a pre-resolved array exists (one entry per
// string id in the dex file), it is consulted first; a null entry there
// falls through to the lossy, hash-sized string cache, where
// GetObjectForIndex() rejects slots occupied by a different index.
inline String* DexCache::GetResolvedString(dex::StringIndex string_idx) {
  const uint32_t num_preresolved_strings = NumPreResolvedStrings();
  if (num_preresolved_strings != 0u) {
    DCHECK_LT(string_idx.index_, num_preresolved_strings);
    DCHECK_EQ(num_preresolved_strings, GetDexFile()->NumStringIds());
    mirror::String* string = GetPreResolvedStrings()[string_idx.index_].Read();
    if (LIKELY(string != nullptr)) {
      return string;
    }
  }
  return GetStrings()[StringSlotIndex(string_idx)].load(
      std::memory_order_relaxed).GetObjectForIndex(string_idx.index_);
}
| 99 | |
// Stores |resolved| into the string cache slot for |string_idx|,
// overwriting whatever index previously occupied that (lossy) slot.
// Under an active AOT-compiler transaction the store is recorded so it
// can be rolled back if the transaction aborts.
inline void DexCache::SetResolvedString(dex::StringIndex string_idx, ObjPtr<String> resolved) {
  DCHECK(resolved != nullptr);
  GetStrings()[StringSlotIndex(string_idx)].store(
      StringDexCachePair(resolved, string_idx.index_), std::memory_order_relaxed);
  Runtime* const runtime = Runtime::Current();
  if (UNLIKELY(runtime->IsActiveTransaction())) {
    DCHECK(runtime->IsAotCompiler());
    // Record for rollback on transaction abort.
    runtime->RecordResolveString(this, string_idx);
  }
  // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
  WriteBarrier::ForEveryFieldWrite(this);
}
| 112 | |
// Stores |resolved| into the pre-resolved strings array, which is indexed
// directly by string id (no hashing, no eviction). AOT-compiler-only:
// the CHECKs enforce that this never runs in a regular runtime or inside
// a transaction (there is no rollback recording here).
inline void DexCache::SetPreResolvedString(dex::StringIndex string_idx, ObjPtr<String> resolved) {
  DCHECK(resolved != nullptr);
  DCHECK_LT(string_idx.index_, GetDexFile()->NumStringIds());
  GetPreResolvedStrings()[string_idx.index_] = GcRoot<mirror::String>(resolved);
  Runtime* const runtime = Runtime::Current();
  CHECK(runtime->IsAotCompiler());
  CHECK(!runtime->IsActiveTransaction());
  // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
  WriteBarrier::ForEveryFieldWrite(this);
}
| 123 | |
// Detaches the pre-resolved strings array: nulls the pointer field and
// zeroes the count field using raw, non-transactional setters with
// verification disabled. The backing memory is not freed here —
// presumably it is owned by the dex cache's arrays allocation; confirm
// before relying on this for reclamation.
inline void DexCache::ClearPreResolvedStrings() {
  SetFieldPtr64</*kTransactionActive=*/false,
                /*kCheckTransaction=*/false,
                kVerifyNone,
                GcRoot<mirror::String>*>(PreResolvedStringsOffset(), nullptr);
  SetField32</*kTransactionActive=*/false,
             /*bool kCheckTransaction=*/false,
             kVerifyNone,
             /*kIsVolatile=*/false>(NumPreResolvedStringsOffset(), 0);
}
| 134 | |
// Clears the cache slot for |string_idx| iff it currently holds exactly
// that index, replacing it with the slot's invalid-index sentinel. A slot
// occupied by a different (colliding) index is left untouched.
inline void DexCache::ClearString(dex::StringIndex string_idx) {
  DCHECK(Runtime::Current()->IsAotCompiler());
  uint32_t slot_idx = StringSlotIndex(string_idx);
  StringDexCacheType* slot = &GetStrings()[slot_idx];
  // This is racy but should only be called from the transactional interpreter.
  if (slot->load(std::memory_order_relaxed).index == string_idx.index_) {
    StringDexCachePair cleared(nullptr, StringDexCachePair::InvalidIndexForSlot(slot_idx));
    slot->store(cleared, std::memory_order_relaxed);
  }
}
| 145 | |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 146 | inline uint32_t DexCache::TypeSlotIndex(dex::TypeIndex type_idx) { |
| 147 | DCHECK_LT(type_idx.index_, GetDexFile()->NumTypeIds()); |
| 148 | const uint32_t slot_idx = type_idx.index_ % kDexCacheTypeCacheSize; |
| 149 | DCHECK_LT(slot_idx, NumResolvedTypes()); |
| 150 | return slot_idx; |
| 151 | } |
| 152 | |
// Returns the resolved Class for |type_idx|, or null if not cached (or if
// the slot is occupied by a colliding type id).
inline Class* DexCache::GetResolvedType(dex::TypeIndex type_idx) {
  // It is theorized that a load acquire is not required since obtaining the resolved class will
  // always have an address dependency or a lock. (Pairs with the release
  // store in SetResolvedType(); see b/32075261.)
  return GetResolvedTypes()[TypeSlotIndex(type_idx)].load(
      std::memory_order_relaxed).GetObjectForIndex(type_idx.index_);
}
| 159 | |
// Stores |resolved| into the type cache slot for |type_idx|, overwriting
// any colliding occupant of the slot.
inline void DexCache::SetResolvedType(dex::TypeIndex type_idx, ObjPtr<Class> resolved) {
  DCHECK(resolved != nullptr);
  // TODO default transaction support.
  // Use a release store for SetResolvedType. This is done to prevent other threads from seeing a
  // class but not necessarily seeing the loaded members like the static fields array.
  // See b/32075261.
  GetResolvedTypes()[TypeSlotIndex(type_idx)].store(
      TypeDexCachePair(resolved, type_idx.index_), std::memory_order_release);
  // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
  WriteBarrier::ForEveryFieldWrite(this);
}
| 171 | |
// Clears the type cache slot for |type_idx| iff it currently holds exactly
// that index; a slot occupied by a colliding index is left untouched.
inline void DexCache::ClearResolvedType(dex::TypeIndex type_idx) {
  DCHECK(Runtime::Current()->IsAotCompiler());
  uint32_t slot_idx = TypeSlotIndex(type_idx);
  TypeDexCacheType* slot = &GetResolvedTypes()[slot_idx];
  // This is racy but should only be called from the single-threaded ImageWriter and tests.
  if (slot->load(std::memory_order_relaxed).index == type_idx.index_) {
    TypeDexCachePair cleared(nullptr, TypeDexCachePair::InvalidIndexForSlot(slot_idx));
    slot->store(cleared, std::memory_order_relaxed);
  }
}
| 182 | |
Orion Hodson | 06d10a7 | 2018-05-14 08:53:38 +0100 | [diff] [blame] | 183 | inline uint32_t DexCache::MethodTypeSlotIndex(dex::ProtoIndex proto_idx) { |
Narayan Kamath | 25352fc | 2016-08-03 12:46:58 +0100 | [diff] [blame] | 184 | DCHECK(Runtime::Current()->IsMethodHandlesEnabled()); |
Orion Hodson | 06d10a7 | 2018-05-14 08:53:38 +0100 | [diff] [blame] | 185 | DCHECK_LT(proto_idx.index_, GetDexFile()->NumProtoIds()); |
| 186 | const uint32_t slot_idx = proto_idx.index_ % kDexCacheMethodTypeCacheSize; |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 187 | DCHECK_LT(slot_idx, NumResolvedMethodTypes()); |
| 188 | return slot_idx; |
| 189 | } |
| 190 | |
// Returns the resolved MethodType for |proto_idx|, or null if not cached
// (or if the slot is occupied by a colliding proto id).
inline MethodType* DexCache::GetResolvedMethodType(dex::ProtoIndex proto_idx) {
  return GetResolvedMethodTypes()[MethodTypeSlotIndex(proto_idx)].load(
      std::memory_order_relaxed).GetObjectForIndex(proto_idx.index_);
}
| 195 | |
// Stores |resolved| into the method-type cache slot for |proto_idx|,
// overwriting any colliding occupant of the slot.
inline void DexCache::SetResolvedMethodType(dex::ProtoIndex proto_idx, MethodType* resolved) {
  DCHECK(resolved != nullptr);
  GetResolvedMethodTypes()[MethodTypeSlotIndex(proto_idx)].store(
      MethodTypeDexCachePair(resolved, proto_idx.index_), std::memory_order_relaxed);
  // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
  WriteBarrier::ForEveryFieldWrite(this);
}
| 203 | |
// Returns the resolved CallSite for |call_site_idx|, or null if none has
// been installed. The GcRoot slot is reinterpreted as an atomic so this
// load pairs with the seq_cst CAS in SetResolvedCallSite(); call-site
// slots are keyed directly by index (no hashing, no eviction).
inline CallSite* DexCache::GetResolvedCallSite(uint32_t call_site_idx) {
  DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
  DCHECK_LT(call_site_idx, GetDexFile()->NumCallSiteIds());
  GcRoot<mirror::CallSite>& target = GetResolvedCallSites()[call_site_idx];
  Atomic<GcRoot<mirror::CallSite>>& ref =
      reinterpret_cast<Atomic<GcRoot<mirror::CallSite>>&>(target);
  return ref.load(std::memory_order_seq_cst).Read();
}
| 212 | |
// Attempts to install |call_site| for |call_site_idx| and returns the
// winning call site. The first store to a slot wins (null -> candidate
// CAS); if another thread won the race, the already-installed call site is
// returned instead. Callers must use the returned object, not their
// argument.
inline ObjPtr<CallSite> DexCache::SetResolvedCallSite(uint32_t call_site_idx,
                                                      ObjPtr<CallSite> call_site) {
  DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
  DCHECK_LT(call_site_idx, GetDexFile()->NumCallSiteIds());

  GcRoot<mirror::CallSite> null_call_site(nullptr);
  GcRoot<mirror::CallSite> candidate(call_site);
  GcRoot<mirror::CallSite>& target = GetResolvedCallSites()[call_site_idx];

  // The first assignment for a given call site wins.
  Atomic<GcRoot<mirror::CallSite>>& ref =
      reinterpret_cast<Atomic<GcRoot<mirror::CallSite>>&>(target);
  if (ref.CompareAndSetStrongSequentiallyConsistent(null_call_site, candidate)) {
    // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
    WriteBarrier::ForEveryFieldWrite(this);
    return call_site;
  } else {
    // Lost the race: return the winner's call site.
    return target.Read();
  }
}
| 233 | |
Vladimir Marko | f44d36c | 2017-03-14 14:18:46 +0000 | [diff] [blame] | 234 | inline uint32_t DexCache::FieldSlotIndex(uint32_t field_idx) { |
| 235 | DCHECK_LT(field_idx, GetDexFile()->NumFieldIds()); |
| 236 | const uint32_t slot_idx = field_idx % kDexCacheFieldCacheSize; |
| 237 | DCHECK_LT(slot_idx, NumResolvedFields()); |
| 238 | return slot_idx; |
| 239 | } |
| 240 | |
// Returns the resolved ArtField for |field_idx|, or null if not cached
// (or if the slot is occupied by a colliding field id). |ptr_size| must
// match the runtime's image pointer size.
inline ArtField* DexCache::GetResolvedField(uint32_t field_idx, PointerSize ptr_size) {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
  auto pair = GetNativePairPtrSize(GetResolvedFields(), FieldSlotIndex(field_idx), ptr_size);
  return pair.GetObjectForIndex(field_idx);
}
| 246 | |
// Stores |field| into the field cache slot for |field_idx|, overwriting
// any colliding occupant. No GC write barrier is issued here — the entry
// is a native ArtField pointer, not a managed reference; confirm against
// the GC's expectations if this changes.
inline void DexCache::SetResolvedField(uint32_t field_idx, ArtField* field, PointerSize ptr_size) {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
  DCHECK(field != nullptr);
  FieldDexCachePair pair(field, field_idx);
  SetNativePairPtrSize(GetResolvedFields(), FieldSlotIndex(field_idx), pair, ptr_size);
}
| 253 | |
// Clears the field cache slot for |field_idx| iff it currently holds
// exactly that index; a slot occupied by a colliding index is untouched.
inline void DexCache::ClearResolvedField(uint32_t field_idx, PointerSize ptr_size) {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
  uint32_t slot_idx = FieldSlotIndex(field_idx);
  auto* resolved_fields = GetResolvedFields();
  // This is racy but should only be called from the single-threaded ImageWriter.
  DCHECK(Runtime::Current()->IsAotCompiler());
  if (GetNativePairPtrSize(resolved_fields, slot_idx, ptr_size).index == field_idx) {
    FieldDexCachePair cleared(nullptr, FieldDexCachePair::InvalidIndexForSlot(slot_idx));
    SetNativePairPtrSize(resolved_fields, slot_idx, cleared, ptr_size);
  }
}
| 265 | |
Vladimir Marko | 07bfbac | 2017-07-06 14:55:02 +0100 | [diff] [blame] | 266 | inline uint32_t DexCache::MethodSlotIndex(uint32_t method_idx) { |
| 267 | DCHECK_LT(method_idx, GetDexFile()->NumMethodIds()); |
| 268 | const uint32_t slot_idx = method_idx % kDexCacheMethodCacheSize; |
| 269 | DCHECK_LT(slot_idx, NumResolvedMethods()); |
| 270 | return slot_idx; |
| 271 | } |
| 272 | |
// Returns the resolved ArtMethod for |method_idx|, or null if not cached
// (or if the slot is occupied by a colliding method id). |ptr_size| must
// match the runtime's image pointer size.
inline ArtMethod* DexCache::GetResolvedMethod(uint32_t method_idx, PointerSize ptr_size) {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
  auto pair = GetNativePairPtrSize(GetResolvedMethods(), MethodSlotIndex(method_idx), ptr_size);
  return pair.GetObjectForIndex(method_idx);
}
| 278 | |
// Stores |method| into the method cache slot for |method_idx|, overwriting
// any colliding occupant. As with fields, the entry is a native pointer,
// so no GC write barrier is issued.
inline void DexCache::SetResolvedMethod(uint32_t method_idx,
                                        ArtMethod* method,
                                        PointerSize ptr_size) {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
  DCHECK(method != nullptr);
  MethodDexCachePair pair(method, method_idx);
  SetNativePairPtrSize(GetResolvedMethods(), MethodSlotIndex(method_idx), pair, ptr_size);
}
| 287 | |
// Clears the method cache slot for |method_idx| iff it currently holds
// exactly that index; a slot occupied by a colliding index is untouched.
inline void DexCache::ClearResolvedMethod(uint32_t method_idx, PointerSize ptr_size) {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
  uint32_t slot_idx = MethodSlotIndex(method_idx);
  auto* resolved_methods = GetResolvedMethods();
  // This is racy but should only be called from the single-threaded ImageWriter.
  DCHECK(Runtime::Current()->IsAotCompiler());
  if (GetNativePairPtrSize(resolved_methods, slot_idx, ptr_size).index == method_idx) {
    MethodDexCachePair cleared(nullptr, MethodDexCachePair::InvalidIndexForSlot(slot_idx));
    SetNativePairPtrSize(resolved_methods, slot_idx, cleared, ptr_size);
  }
}
| 299 | |
// Atomically reads pair |idx| from a native pointer/index pair array laid
// out for |ptr_size|. On 64-bit, each pair is a 16-byte unit read via
// AtomicLoadRelaxed16B (there is no standard 128-bit atomic); on 32-bit,
// each pair is an 8-byte unit read with a relaxed atomic load. The raw
// words are converted back to a typed pointer + index pair.
template <typename T>
NativeDexCachePair<T> DexCache::GetNativePairPtrSize(std::atomic<NativeDexCachePair<T>>* pair_array,
                                                     size_t idx,
                                                     PointerSize ptr_size) {
  if (ptr_size == PointerSize::k64) {
    auto* array = reinterpret_cast<std::atomic<ConversionPair64>*>(pair_array);
    ConversionPair64 value = AtomicLoadRelaxed16B(&array[idx]);
    return NativeDexCachePair<T>(reinterpret_cast64<T*>(value.first),
                                 dchecked_integral_cast<size_t>(value.second));
  } else {
    auto* array = reinterpret_cast<std::atomic<ConversionPair32>*>(pair_array);
    ConversionPair32 value = array[idx].load(std::memory_order_relaxed);
    return NativeDexCachePair<T>(reinterpret_cast32<T*>(value.first), value.second);
  }
}
| 315 | |
// Atomically stores |pair| at |idx| in a native pointer/index pair array
// laid out for |ptr_size|. Uses release ordering (AtomicStoreRelease16B on
// 64-bit, memory_order_release on 32-bit) — presumably so that a reader
// observing the pair also observes prior writes to the pointee; confirm
// against the relaxed loads in GetNativePairPtrSize().
template <typename T>
void DexCache::SetNativePairPtrSize(std::atomic<NativeDexCachePair<T>>* pair_array,
                                    size_t idx,
                                    NativeDexCachePair<T> pair,
                                    PointerSize ptr_size) {
  if (ptr_size == PointerSize::k64) {
    auto* array = reinterpret_cast<std::atomic<ConversionPair64>*>(pair_array);
    ConversionPair64 v(reinterpret_cast64<uint64_t>(pair.object), pair.index);
    AtomicStoreRelease16B(&array[idx], v);
  } else {
    auto* array = reinterpret_cast<std::atomic<ConversionPair32>*>(pair_array);
    ConversionPair32 v(reinterpret_cast32<uint32_t>(pair.object),
                       dchecked_integral_cast<uint32_t>(pair.index));
    array[idx].store(v, std::memory_order_release);
  }
}
| 332 | |
// Visits the GcRoot inside each of the |num_pairs| pairs, letting
// |visitor| (and the read barrier) update roots for moved objects. A pair
// is written back only if the root's value actually changed, so untouched
// slots are not re-stored.
template <typename T,
          ReadBarrierOption kReadBarrierOption,
          typename Visitor>
inline void VisitDexCachePairs(std::atomic<DexCachePair<T>>* pairs,
                               size_t num_pairs,
                               const Visitor& visitor)
    REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
  for (size_t i = 0; i < num_pairs; ++i) {
    DexCachePair<T> source = pairs[i].load(std::memory_order_relaxed);
    // NOTE: We need the "template" keyword here to avoid a compilation
    // failure. GcRoot<T> is a template argument-dependent type and we need to
    // tell the compiler to treat "Read" as a template rather than a field or
    // function. Otherwise, on encountering the "<" token, the compiler would
    // treat "Read" as a field.
    T* const before = source.object.template Read<kReadBarrierOption>();
    visitor.VisitRootIfNonNull(source.object.AddressWithoutBarrier());
    if (source.object.template Read<kReadBarrierOption>() != before) {
      pairs[i].store(source, std::memory_order_relaxed);
    }
  }
}
| 354 | |
// Visits all references held by this DexCache for the GC: instance fields
// first, then (when kVisitNativeRoots) the GC roots stored in the string,
// type, method-type, resolved-call-site, and pre-resolved-string arrays.
template <bool kVisitNativeRoots,
          VerifyObjectFlags kVerifyFlags,
          ReadBarrierOption kReadBarrierOption,
          typename Visitor>
inline void DexCache::VisitReferences(ObjPtr<Class> klass, const Visitor& visitor) {
  // Visit instance fields first.
  VisitInstanceFieldsReferences<kVerifyFlags, kReadBarrierOption>(klass, visitor);
  // Visit arrays after.
  if (kVisitNativeRoots) {
    VisitDexCachePairs<String, kReadBarrierOption, Visitor>(
        GetStrings<kVerifyFlags>(), NumStrings<kVerifyFlags>(), visitor);

    VisitDexCachePairs<Class, kReadBarrierOption, Visitor>(
        GetResolvedTypes<kVerifyFlags>(), NumResolvedTypes<kVerifyFlags>(), visitor);

    VisitDexCachePairs<MethodType, kReadBarrierOption, Visitor>(
        GetResolvedMethodTypes<kVerifyFlags>(), NumResolvedMethodTypes<kVerifyFlags>(), visitor);

    // Call-site and pre-resolved-string entries are plain GcRoots (not
    // index pairs), so they are visited directly.
    GcRoot<mirror::CallSite>* resolved_call_sites = GetResolvedCallSites<kVerifyFlags>();
    size_t num_call_sites = NumResolvedCallSites<kVerifyFlags>();
    for (size_t i = 0; i != num_call_sites; ++i) {
      visitor.VisitRootIfNonNull(resolved_call_sites[i].AddressWithoutBarrier());
    }

    GcRoot<mirror::String>* const preresolved_strings = GetPreResolvedStrings();
    const size_t num_preresolved_strings = NumPreResolvedStrings();
    for (size_t i = 0; i != num_preresolved_strings; ++i) {
      visitor.VisitRootIfNonNull(preresolved_strings[i].AddressWithoutBarrier());
    }
  }
}
Mathieu Chartier | 60bc39c | 2016-01-27 18:37:48 -0800 | [diff] [blame] | 387 | template <ReadBarrierOption kReadBarrierOption, typename Visitor> |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 388 | inline void DexCache::FixupStrings(StringDexCacheType* dest, const Visitor& visitor) { |
| 389 | StringDexCacheType* src = GetStrings(); |
Mathieu Chartier | 4b00d34 | 2015-11-13 10:42:08 -0800 | [diff] [blame] | 390 | for (size_t i = 0, count = NumStrings(); i < count; ++i) { |
Christina Wadsworth | bf44e0e | 2016-08-18 10:37:42 -0700 | [diff] [blame] | 391 | StringDexCachePair source = src[i].load(std::memory_order_relaxed); |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 392 | String* ptr = source.object.Read<kReadBarrierOption>(); |
| 393 | String* new_source = visitor(ptr); |
Narayan Kamath | c38a6f8 | 2016-09-29 17:07:20 +0100 | [diff] [blame] | 394 | source.object = GcRoot<String>(new_source); |
Christina Wadsworth | bf44e0e | 2016-08-18 10:37:42 -0700 | [diff] [blame] | 395 | dest[i].store(source, std::memory_order_relaxed); |
Mathieu Chartier | 4b00d34 | 2015-11-13 10:42:08 -0800 | [diff] [blame] | 396 | } |
| 397 | } |
| 398 | |
Mathieu Chartier | 60bc39c | 2016-01-27 18:37:48 -0800 | [diff] [blame] | 399 | template <ReadBarrierOption kReadBarrierOption, typename Visitor> |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 400 | inline void DexCache::FixupResolvedTypes(TypeDexCacheType* dest, const Visitor& visitor) { |
| 401 | TypeDexCacheType* src = GetResolvedTypes(); |
Mathieu Chartier | 4b00d34 | 2015-11-13 10:42:08 -0800 | [diff] [blame] | 402 | for (size_t i = 0, count = NumResolvedTypes(); i < count; ++i) { |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 403 | TypeDexCachePair source = src[i].load(std::memory_order_relaxed); |
| 404 | Class* ptr = source.object.Read<kReadBarrierOption>(); |
| 405 | Class* new_source = visitor(ptr); |
| 406 | source.object = GcRoot<Class>(new_source); |
| 407 | dest[i].store(source, std::memory_order_relaxed); |
Mathieu Chartier | 4b00d34 | 2015-11-13 10:42:08 -0800 | [diff] [blame] | 408 | } |
| 409 | } |
| 410 | |
Narayan Kamath | 7fe5658 | 2016-10-14 18:49:12 +0100 | [diff] [blame] | 411 | template <ReadBarrierOption kReadBarrierOption, typename Visitor> |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 412 | inline void DexCache::FixupResolvedMethodTypes(MethodTypeDexCacheType* dest, |
Narayan Kamath | 7fe5658 | 2016-10-14 18:49:12 +0100 | [diff] [blame] | 413 | const Visitor& visitor) { |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 414 | MethodTypeDexCacheType* src = GetResolvedMethodTypes(); |
Narayan Kamath | 7fe5658 | 2016-10-14 18:49:12 +0100 | [diff] [blame] | 415 | for (size_t i = 0, count = NumResolvedMethodTypes(); i < count; ++i) { |
| 416 | MethodTypeDexCachePair source = src[i].load(std::memory_order_relaxed); |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 417 | MethodType* ptr = source.object.Read<kReadBarrierOption>(); |
| 418 | MethodType* new_source = visitor(ptr); |
Narayan Kamath | 7fe5658 | 2016-10-14 18:49:12 +0100 | [diff] [blame] | 419 | source.object = GcRoot<MethodType>(new_source); |
| 420 | dest[i].store(source, std::memory_order_relaxed); |
| 421 | } |
| 422 | } |
| 423 | |
Orion Hodson | c069a30 | 2017-01-18 09:23:12 +0000 | [diff] [blame] | 424 | template <ReadBarrierOption kReadBarrierOption, typename Visitor> |
| 425 | inline void DexCache::FixupResolvedCallSites(GcRoot<mirror::CallSite>* dest, |
| 426 | const Visitor& visitor) { |
| 427 | GcRoot<mirror::CallSite>* src = GetResolvedCallSites(); |
| 428 | for (size_t i = 0, count = NumResolvedCallSites(); i < count; ++i) { |
| 429 | mirror::CallSite* source = src[i].Read<kReadBarrierOption>(); |
| 430 | mirror::CallSite* new_source = visitor(source); |
| 431 | dest[i] = GcRoot<mirror::CallSite>(new_source); |
| 432 | } |
| 433 | } |
| 434 | |
Ian Rogers | 39ebcb8 | 2013-05-30 16:57:23 -0700 | [diff] [blame] | 435 | } // namespace mirror |
| 436 | } // namespace art |
| 437 | |
Brian Carlstrom | fc0e321 | 2013-07-17 14:40:12 -0700 | [diff] [blame] | 438 | #endif // ART_RUNTIME_MIRROR_DEX_CACHE_INL_H_ |