author Nicolas Geoffray <ngeoffray@google.com> 2022-10-10 12:07:46 +0100
committer Nicolas Geoffray <ngeoffray@google.com> 2022-10-13 11:36:34 +0000
commit 5111cb6b246c62a40806940076f9e10761f5f41c (patch)
tree   ef27b3465eef3c219e75d13276cecaf9441d6c4e /runtime/mirror/dex_cache-inl.h
parent d828fc10982300728d02c97dfac1a0e3684551c8 (diff)
Refactor and cleanup DexCache.
- Introduce macros to avoid duplicating code for each dex cache kind.
- Remove preResolvedStrings, this was unused.
- Remove dex cache length fields, we can easily infer them.

Test: test.py
Change-Id: I1e0bc8cf078ce8e09c4d756c63be32cb344fcce1
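
The per-kind macros referred to above live in dex_cache.h and are not part of this diff. As a rough, self-contained illustration of the idea only (FOR_EACH_CACHE_KIND, ToyDexCache and the DEFINE_* helpers below are hypothetical names, not ART's), an X-macro can expand one field plus one accessor pair per cache kind so the boilerplate is written once:

    #include <cstdio>

    // One entry per cache kind; each V(...) expansion stamps out the same code.
    #define FOR_EACH_CACHE_KIND(V) \
      V(Strings)                   \
      V(ResolvedTypes)             \
      V(ResolvedFields)

    class ToyDexCache {
     public:
      // Generate Get<Kind>() / Set<Kind>() once for every kind listed above.
    #define DEFINE_KIND_ACCESSORS(Kind)                   \
      void* Get##Kind() const { return Kind##_; }         \
      void Set##Kind(void* array) { Kind##_ = array; }
      FOR_EACH_CACHE_KIND(DEFINE_KIND_ACCESSORS)
    #undef DEFINE_KIND_ACCESSORS

     private:
      // Generate one backing pointer field per kind.
    #define DEFINE_KIND_FIELD(Kind) void* Kind##_ = nullptr;
      FOR_EACH_CACHE_KIND(DEFINE_KIND_FIELD)
    #undef DEFINE_KIND_FIELD
    };

    int main() {
      ToyDexCache cache;
      int dummy = 0;
      cache.SetStrings(&dummy);
      std::printf("Strings slot set: %s\n", cache.GetStrings() == &dummy ? "yes" : "no");
      return 0;
    }

In the hunks below, accessors of this shape, presumably generated by those macros, surface as calls such as AllocateStrings(), AllocateResolvedTypes() and GetResolvedFields(), replacing the hand-written AllocArray<...> call sites.
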
Diffstat (limited to 'runtime/mirror/dex_cache-inl.h')
-rw-r--r--  runtime/mirror/dex_cache-inl.h  219
1 file changed, 58 insertions, 161 deletions
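
The removed length fields are redundant because each cache's length is fully determined by the dex file's ID count and the fixed cache capacity, which is exactly what AllocArray computes below before allocating. A minimal sketch of that inference (the constant's value and the function name here are illustrative only; the constant name mirrors the kDexCacheStringCacheSize used in the old call sites):

    #include <algorithm>
    #include <cstddef>
    #include <cstdio>

    // Capacity of the string cache; the value here is illustrative only.
    constexpr size_t kDexCacheStringCacheSize = 1024;

    // Instead of storing the allocated length in a field, infer it: it is the
    // dex file's string count clamped to the fixed cache capacity, matching
    // the std::min() clamp AllocArray performs before allocating.
    size_t InferredNumStrings(size_t num_string_ids_in_dex_file) {
      return std::min(num_string_ids_in_dex_file, kDexCacheStringCacheSize);
    }

    int main() {
      std::printf("%zu\n", InferredNumStrings(70000));  // prints 1024
      std::printf("%zu\n", InferredNumStrings(37));     // prints 37
      return 0;
    }

This is also why, in the diff below, AllocArray loses its num_offset parameter and the setters no longer pass a Num*Offset() alongside the object offset.
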
diff --git a/runtime/mirror/dex_cache-inl.h b/runtime/mirror/dex_cache-inl.h
index 402bb72945..3cbfb40253 100644
--- a/runtime/mirror/dex_cache-inl.h
+++ b/runtime/mirror/dex_cache-inl.h
@@ -54,10 +54,7 @@ static void InitializeArray(GcRoot<T>*) {
}
template<typename T, size_t kMaxCacheSize>
-T* DexCache::AllocArray(MemberOffset obj_offset,
- MemberOffset num_offset,
- size_t num,
- LinearAllocKind kind) {
+T* DexCache::AllocArray(MemberOffset obj_offset, size_t num, LinearAllocKind kind) {
num = std::min<size_t>(num, kMaxCacheSize);
if (num == 0) {
return nullptr;
@@ -79,7 +76,6 @@ T* DexCache::AllocArray(MemberOffset obj_offset,
}
array = reinterpret_cast<T*>(alloc->AllocAlign16(self, RoundUp(num * sizeof(T), 16), kind));
InitializeArray(array); // Ensure other threads see the array initialized.
- dex_cache->SetField32Volatile<false, false>(num_offset, num);
dex_cache->SetField64Volatile<false, false>(obj_offset, reinterpret_cast64<uint64_t>(array));
return array;
}
@@ -89,14 +85,6 @@ inline DexCachePair<T>::DexCachePair(ObjPtr<T> object, uint32_t index)
: object(object), index(index) {}
template <typename T>
-inline void DexCachePair<T>::Initialize(std::atomic<DexCachePair<T>>* dex_cache) {
- DexCachePair<T> first_elem;
- first_elem.object = GcRoot<T>(nullptr);
- first_elem.index = InvalidIndexForSlot(0);
- dex_cache[0].store(first_elem, std::memory_order_relaxed);
-}
-
-template <typename T>
inline T* DexCachePair<T>::GetObjectForIndex(uint32_t idx) {
if (idx != index) {
return nullptr;
@@ -106,11 +94,22 @@ inline T* DexCachePair<T>::GetObjectForIndex(uint32_t idx) {
}
template <typename T>
+inline void DexCachePair<T>::Initialize(std::atomic<DexCachePair<T>>* dex_cache) {
+ DexCachePair<T> first_elem;
+ first_elem.object = GcRoot<T>(nullptr);
+ first_elem.index = InvalidIndexForSlot(0);
+ dex_cache[0].store(first_elem, std::memory_order_relaxed);
+}
+
+template <typename T>
inline void NativeDexCachePair<T>::Initialize(std::atomic<NativeDexCachePair<T>>* dex_cache) {
NativeDexCachePair<T> first_elem;
first_elem.object = nullptr;
first_elem.index = InvalidIndexForSlot(0);
- DexCache::SetNativePair(dex_cache, 0, first_elem);
+
+ auto* array = reinterpret_cast<std::atomic<AtomicPair<uintptr_t>>*>(dex_cache);
+ AtomicPair<uintptr_t> v(reinterpret_cast<size_t>(first_elem.object), first_elem.index);
+ AtomicPairStoreRelease(&array[0], v);
}
inline uint32_t DexCache::ClassSize(PointerSize pointer_size) {
@@ -118,34 +117,21 @@ inline uint32_t DexCache::ClassSize(PointerSize pointer_size) {
return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0, pointer_size);
}
-inline uint32_t DexCache::StringSlotIndex(dex::StringIndex string_idx) {
- DCHECK_LT(string_idx.index_, GetDexFile()->NumStringIds());
- const uint32_t slot_idx = string_idx.index_ % kDexCacheStringCacheSize;
- DCHECK_LT(slot_idx, NumStrings());
- return slot_idx;
-}
-
inline String* DexCache::GetResolvedString(dex::StringIndex string_idx) {
- StringDexCacheType* strings = GetStrings();
+ auto* strings = GetStrings();
if (UNLIKELY(strings == nullptr)) {
return nullptr;
}
- return strings[StringSlotIndex(string_idx)].load(
- std::memory_order_relaxed).GetObjectForIndex(string_idx.index_);
+ return strings->Get(string_idx.index_);
}
inline void DexCache::SetResolvedString(dex::StringIndex string_idx, ObjPtr<String> resolved) {
DCHECK(resolved != nullptr);
- StringDexCacheType* strings = GetStrings();
+ auto* strings = GetStrings();
if (UNLIKELY(strings == nullptr)) {
- strings = AllocArray<StringDexCacheType, kDexCacheStringCacheSize>(
- StringsOffset(),
- NumStringsOffset(),
- GetDexFile()->NumStringIds(),
- LinearAllocKind::kDexCacheArray);
+ strings = AllocateStrings();
}
- strings[StringSlotIndex(string_idx)].store(
- StringDexCachePair(resolved, string_idx.index_), std::memory_order_relaxed);
+ strings->Set(string_idx.index_, resolved.Ptr());
Runtime* const runtime = Runtime::Current();
if (UNLIKELY(runtime->IsActiveTransaction())) {
DCHECK(runtime->IsAotCompiler());
@@ -157,102 +143,63 @@ inline void DexCache::SetResolvedString(dex::StringIndex string_idx, ObjPtr<Stri
inline void DexCache::ClearString(dex::StringIndex string_idx) {
DCHECK(Runtime::Current()->IsAotCompiler());
- uint32_t slot_idx = StringSlotIndex(string_idx);
- StringDexCacheType* strings = GetStrings();
+ auto* strings = GetStrings();
if (UNLIKELY(strings == nullptr)) {
return;
}
- StringDexCacheType* slot = &strings[slot_idx];
- // This is racy but should only be called from the transactional interpreter.
- if (slot->load(std::memory_order_relaxed).index == string_idx.index_) {
- StringDexCachePair cleared(nullptr, StringDexCachePair::InvalidIndexForSlot(slot_idx));
- slot->store(cleared, std::memory_order_relaxed);
- }
-}
-
-inline uint32_t DexCache::TypeSlotIndex(dex::TypeIndex type_idx) {
- DCHECK_LT(type_idx.index_, GetDexFile()->NumTypeIds());
- const uint32_t slot_idx = type_idx.index_ % kDexCacheTypeCacheSize;
- DCHECK_LT(slot_idx, NumResolvedTypes());
- return slot_idx;
+ strings->Clear(string_idx.index_);
}
inline Class* DexCache::GetResolvedType(dex::TypeIndex type_idx) {
// It is theorized that a load acquire is not required since obtaining the resolved class will
// always have an address dependency or a lock.
- TypeDexCacheType* resolved_types = GetResolvedTypes();
+ auto* resolved_types = GetResolvedTypes();
if (UNLIKELY(resolved_types == nullptr)) {
return nullptr;
}
- return resolved_types[TypeSlotIndex(type_idx)].load(
- std::memory_order_relaxed).GetObjectForIndex(type_idx.index_);
+ return resolved_types->Get(type_idx.index_);
}
inline void DexCache::SetResolvedType(dex::TypeIndex type_idx, ObjPtr<Class> resolved) {
DCHECK(resolved != nullptr);
DCHECK(resolved->IsResolved()) << resolved->GetStatus();
- TypeDexCacheType* resolved_types = GetResolvedTypes();
+ auto* resolved_types = GetResolvedTypes();
if (UNLIKELY(resolved_types == nullptr)) {
- resolved_types = AllocArray<TypeDexCacheType, kDexCacheTypeCacheSize>(
- ResolvedTypesOffset(),
- NumResolvedTypesOffset(),
- GetDexFile()->NumTypeIds(),
- LinearAllocKind::kDexCacheArray);
+ resolved_types = AllocateResolvedTypes();
}
// TODO default transaction support.
// Use a release store for SetResolvedType. This is done to prevent other threads from seeing a
// class but not necessarily seeing the loaded members like the static fields array.
// See b/32075261.
- resolved_types[TypeSlotIndex(type_idx)].store(
- TypeDexCachePair(resolved, type_idx.index_), std::memory_order_release);
+ resolved_types->Set(type_idx.index_, resolved.Ptr());
// TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
WriteBarrier::ForEveryFieldWrite(this);
}
inline void DexCache::ClearResolvedType(dex::TypeIndex type_idx) {
DCHECK(Runtime::Current()->IsAotCompiler());
- TypeDexCacheType* resolved_types = GetResolvedTypes();
+ auto* resolved_types = GetResolvedTypes();
if (UNLIKELY(resolved_types == nullptr)) {
return;
}
- uint32_t slot_idx = TypeSlotIndex(type_idx);
- TypeDexCacheType* slot = &resolved_types[slot_idx];
- // This is racy but should only be called from the single-threaded ImageWriter and tests.
- if (slot->load(std::memory_order_relaxed).index == type_idx.index_) {
- TypeDexCachePair cleared(nullptr, TypeDexCachePair::InvalidIndexForSlot(slot_idx));
- slot->store(cleared, std::memory_order_relaxed);
- }
-}
-
-inline uint32_t DexCache::MethodTypeSlotIndex(dex::ProtoIndex proto_idx) {
- DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
- DCHECK_LT(proto_idx.index_, GetDexFile()->NumProtoIds());
- const uint32_t slot_idx = proto_idx.index_ % kDexCacheMethodTypeCacheSize;
- DCHECK_LT(slot_idx, NumResolvedMethodTypes());
- return slot_idx;
+ resolved_types->Clear(type_idx.index_);
}
inline MethodType* DexCache::GetResolvedMethodType(dex::ProtoIndex proto_idx) {
- MethodTypeDexCacheType* methods = GetResolvedMethodTypes();
+ auto* methods = GetResolvedMethodTypes();
if (UNLIKELY(methods == nullptr)) {
return nullptr;
}
- return methods[MethodTypeSlotIndex(proto_idx)].load(
- std::memory_order_relaxed).GetObjectForIndex(proto_idx.index_);
+ return methods->Get(proto_idx.index_);
}
inline void DexCache::SetResolvedMethodType(dex::ProtoIndex proto_idx, MethodType* resolved) {
DCHECK(resolved != nullptr);
- MethodTypeDexCacheType* methods = GetResolvedMethodTypes();
+ auto* methods = GetResolvedMethodTypes();
if (UNLIKELY(methods == nullptr)) {
- methods = AllocArray<MethodTypeDexCacheType, kDexCacheMethodTypeCacheSize>(
- ResolvedMethodTypesOffset(),
- NumResolvedMethodTypesOffset(),
- GetDexFile()->NumProtoIds(),
- LinearAllocKind::kDexCacheArray);
+ methods = AllocateResolvedMethodTypes();
}
- methods[MethodTypeSlotIndex(proto_idx)].store(
- MethodTypeDexCachePair(resolved, proto_idx.index_), std::memory_order_relaxed);
+ methods->Set(proto_idx.index_, resolved);
Runtime* const runtime = Runtime::Current();
if (UNLIKELY(runtime->IsActiveTransaction())) {
DCHECK(runtime->IsAotCompiler());
@@ -264,14 +211,11 @@ inline void DexCache::SetResolvedMethodType(dex::ProtoIndex proto_idx, MethodTyp
inline void DexCache::ClearMethodType(dex::ProtoIndex proto_idx) {
DCHECK(Runtime::Current()->IsAotCompiler());
- uint32_t slot_idx = MethodTypeSlotIndex(proto_idx);
- MethodTypeDexCacheType* slot = &GetResolvedMethodTypes()[slot_idx];
- // This is racy but should only be called from the transactional interpreter.
- if (slot->load(std::memory_order_relaxed).index == proto_idx.index_) {
- MethodTypeDexCachePair cleared(nullptr,
- MethodTypeDexCachePair::InvalidIndexForSlot(proto_idx.index_));
- slot->store(cleared, std::memory_order_relaxed);
+ auto* methods = GetResolvedMethodTypes();
+ if (methods == nullptr) {
+ return;
}
+ methods->Clear(proto_idx.index_);
}
inline CallSite* DexCache::GetResolvedCallSite(uint32_t call_site_idx) {
@@ -296,11 +240,7 @@ inline ObjPtr<CallSite> DexCache::SetResolvedCallSite(uint32_t call_site_idx,
GcRoot<mirror::CallSite> candidate(call_site);
GcRoot<CallSite>* call_sites = GetResolvedCallSites();
if (UNLIKELY(call_sites == nullptr)) {
- call_sites = AllocArray<GcRoot<CallSite>, std::numeric_limits<size_t>::max()>(
- ResolvedCallSitesOffset(),
- NumResolvedCallSitesOffset(),
- GetDexFile()->NumCallSiteIds(),
- LinearAllocKind::kGCRootArray);
+ call_sites = AllocateResolvedCallSites();
}
GcRoot<mirror::CallSite>& target = call_sites[call_site_idx];
@@ -316,103 +256,60 @@ inline ObjPtr<CallSite> DexCache::SetResolvedCallSite(uint32_t call_site_idx,
}
}
-inline uint32_t DexCache::FieldSlotIndex(uint32_t field_idx) {
- DCHECK_LT(field_idx, GetDexFile()->NumFieldIds());
- const uint32_t slot_idx = field_idx % kDexCacheFieldCacheSize;
- DCHECK_LT(slot_idx, NumResolvedFields());
- return slot_idx;
-}
-
inline ArtField* DexCache::GetResolvedField(uint32_t field_idx) {
- FieldDexCacheType* fields = GetResolvedFields();
+ auto* fields = GetResolvedFields();
if (UNLIKELY(fields == nullptr)) {
return nullptr;
}
- auto pair = GetNativePair(fields, FieldSlotIndex(field_idx));
- return pair.GetObjectForIndex(field_idx);
+ return fields->Get(field_idx);
}
inline void DexCache::SetResolvedField(uint32_t field_idx, ArtField* field) {
DCHECK(field != nullptr);
- FieldDexCachePair pair(field, field_idx);
- FieldDexCacheType* fields = GetResolvedFields();
+ auto* fields = GetResolvedFields();
if (UNLIKELY(fields == nullptr)) {
- fields = AllocArray<FieldDexCacheType, kDexCacheFieldCacheSize>(
- ResolvedFieldsOffset(),
- NumResolvedFieldsOffset(),
- GetDexFile()->NumFieldIds(),
- LinearAllocKind::kNoGCRoots);
+ fields = AllocateResolvedFields();
}
- SetNativePair(fields, FieldSlotIndex(field_idx), pair);
-}
-
-inline uint32_t DexCache::MethodSlotIndex(uint32_t method_idx) {
- DCHECK_LT(method_idx, GetDexFile()->NumMethodIds());
- const uint32_t slot_idx = method_idx % kDexCacheMethodCacheSize;
- DCHECK_LT(slot_idx, NumResolvedMethods());
- return slot_idx;
+ fields->Set(field_idx, field);
}
inline ArtMethod* DexCache::GetResolvedMethod(uint32_t method_idx) {
- MethodDexCacheType* methods = GetResolvedMethods();
+ auto* methods = GetResolvedMethods();
if (UNLIKELY(methods == nullptr)) {
return nullptr;
}
- auto pair = GetNativePair(methods, MethodSlotIndex(method_idx));
- return pair.GetObjectForIndex(method_idx);
+ return methods->Get(method_idx);
}
inline void DexCache::SetResolvedMethod(uint32_t method_idx, ArtMethod* method) {
DCHECK(method != nullptr);
- MethodDexCachePair pair(method, method_idx);
- MethodDexCacheType* methods = GetResolvedMethods();
+ auto* methods = GetResolvedMethods();
if (UNLIKELY(methods == nullptr)) {
- methods = AllocArray<MethodDexCacheType, kDexCacheMethodCacheSize>(
- ResolvedMethodsOffset(),
- NumResolvedMethodsOffset(),
- GetDexFile()->NumMethodIds(),
- LinearAllocKind::kNoGCRoots);
+ methods = AllocateResolvedMethods();
}
- SetNativePair(methods, MethodSlotIndex(method_idx), pair);
+ methods->Set(method_idx, method);
}
-template <typename T>
-NativeDexCachePair<T> DexCache::GetNativePair(std::atomic<NativeDexCachePair<T>>* pair_array,
- size_t idx) {
- auto* array = reinterpret_cast<std::atomic<AtomicPair<uintptr_t>>*>(pair_array);
- AtomicPair<uintptr_t> value = AtomicPairLoadAcquire(&array[idx]);
- return NativeDexCachePair<T>(reinterpret_cast<T*>(value.first), value.second);
-}
-
-template <typename T>
-void DexCache::SetNativePair(std::atomic<NativeDexCachePair<T>>* pair_array,
- size_t idx,
- NativeDexCachePair<T> pair) {
- auto* array = reinterpret_cast<std::atomic<AtomicPair<uintptr_t>>*>(pair_array);
- AtomicPair<uintptr_t> v(reinterpret_cast<size_t>(pair.object), pair.index);
- AtomicPairStoreRelease(&array[idx], v);
-}
-
-template <typename T,
- ReadBarrierOption kReadBarrierOption,
- typename Visitor>
-inline void VisitDexCachePairs(std::atomic<DexCachePair<T>>* pairs,
+template <ReadBarrierOption kReadBarrierOption,
+ typename Visitor,
+ typename T>
+inline void VisitDexCachePairs(T* array,
size_t num_pairs,
const Visitor& visitor)
REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
// Check both the data pointer and count since the array might be initialized
// concurrently on other thread, and we might observe just one of the values.
- for (size_t i = 0; pairs != nullptr && i < num_pairs; ++i) {
- DexCachePair<T> source = pairs[i].load(std::memory_order_relaxed);
+ for (size_t i = 0; array != nullptr && i < num_pairs; ++i) {
+ auto source = array->GetPair(i);
// NOTE: We need the "template" keyword here to avoid a compilation
// failure. GcRoot<T> is a template argument-dependent type and we need to
// tell the compiler to treat "Read" as a template rather than a field or
// function. Otherwise, on encountering the "<" token, the compiler would
// treat "Read" as a field.
- T* const before = source.object.template Read<kReadBarrierOption>();
+ auto const before = source.object.template Read<kReadBarrierOption>();
visitor.VisitRootIfNonNull(source.object.AddressWithoutBarrier());
if (source.object.template Read<kReadBarrierOption>() != before) {
- pairs[i].store(source, std::memory_order_relaxed);
+ array->SetPair(i, source);
}
}
}
@@ -443,13 +340,13 @@ template <VerifyObjectFlags kVerifyFlags,
ReadBarrierOption kReadBarrierOption,
typename Visitor>
inline void DexCache::VisitNativeRoots(const Visitor& visitor) {
- VisitDexCachePairs<String, kReadBarrierOption, Visitor>(
+ VisitDexCachePairs<kReadBarrierOption, Visitor>(
GetStrings<kVerifyFlags>(), NumStrings<kVerifyFlags>(), visitor);
- VisitDexCachePairs<Class, kReadBarrierOption, Visitor>(
+ VisitDexCachePairs<kReadBarrierOption, Visitor>(
GetResolvedTypes<kVerifyFlags>(), NumResolvedTypes<kVerifyFlags>(), visitor);
- VisitDexCachePairs<MethodType, kReadBarrierOption, Visitor>(
+ VisitDexCachePairs<kReadBarrierOption, Visitor>(
GetResolvedMethodTypes<kVerifyFlags>(), NumResolvedMethodTypes<kVerifyFlags>(), visitor);
GcRoot<mirror::CallSite>* resolved_call_sites = GetResolvedCallSites<kVerifyFlags>();