author      2021-02-16 14:06:31 +0000
committer   2021-02-23 15:24:27 +0000
commit      1214319d27e7fb4c4ff00b39799df6f15288098a (patch)
tree        8708943822a634322bd9790c4d008210e44b3ddb /runtime/mirror/dex_cache.h
parent      b7cb691fb8da124e8a6276a5a7fee47dd0aaa338 (diff)
Lazily allocate DexCache arrays.
We rarely need the DexCache for compiled code.
Delay the allocation in the hope that we never need it.
This reduces DexCache memory usage by ~25% at startup
(a sketch of the pattern follows below).
Test: m test-art-host-gtest
Test: test.py -r -b --host
Change-Id: I680a59c905c2b821ee954e4b32abd5d24876bd11
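
The lazy-allocation pattern described in the commit message can be illustrated with a minimal, self-contained C++ sketch. This is not the ART implementation (which allocates from LinearAlloc and uses atomic accessors); the LazyCache name and its members are hypothetical and only show the shape of the idea: pay for the backing array on the first store, not at construction.

#include <cstddef>
#include <memory>

// Hypothetical cache: the backing array is created on the first store rather
// than in the constructor, so caches that are never written stay tiny.
template <typename T>
class LazyCache {
 public:
  explicit LazyCache(size_t capacity) : capacity_(capacity) {}

  void Put(size_t slot, T value) {
    if (slots_ == nullptr) {
      // First use: only now do we pay for the array (value-initialized to T{}).
      slots_ = std::make_unique<T[]>(capacity_);
    }
    slots_[slot % capacity_] = value;
  }

  // A read on a never-written cache is simply a miss (returns T{}).
  T Get(size_t slot) const {
    return slots_ != nullptr ? slots_[slot % capacity_] : T{};
  }

 private:
  std::unique_ptr<T[]> slots_;
  size_t capacity_;
};

With this shape, a cache that is never touched costs only a pointer and a count, which is the effect the commit message quantifies as roughly 25% less DexCache memory at startup.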
Diffstat (limited to 'runtime/mirror/dex_cache.h')
-rw-r--r--   runtime/mirror/dex_cache.h   66
1 file changed, 11 insertions(+), 55 deletions(-)
diff --git a/runtime/mirror/dex_cache.h b/runtime/mirror/dex_cache.h
index dd05dddc46..874e6e1432 100644
--- a/runtime/mirror/dex_cache.h
+++ b/runtime/mirror/dex_cache.h
@@ -18,6 +18,7 @@
 #define ART_RUNTIME_MIRROR_DEX_CACHE_H_
 
 #include "array.h"
+#include "base/array_ref.h"
 #include "base/bit_utils.h"
 #include "base/locks.h"
 #include "dex/dex_file_types.h"
@@ -186,29 +187,13 @@ class MANAGED DexCache final : public Object {
     return sizeof(DexCache);
   }
 
-  // Initialize native fields and allocate memory.
-  void InitializeNativeFields(const DexFile* dex_file, LinearAlloc* linear_alloc)
+  void Initialize(const DexFile* dex_file, ObjPtr<ClassLoader> class_loader)
       REQUIRES_SHARED(Locks::mutator_lock_)
       REQUIRES(Locks::dex_lock_);
 
-  // Clear all native fields.
-  void ResetNativeFields() REQUIRES_SHARED(Locks::mutator_lock_);
-
-  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
-  void FixupStrings(StringDexCacheType* dest, const Visitor& visitor)
-      REQUIRES_SHARED(Locks::mutator_lock_);
-
-  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
-  void FixupResolvedTypes(TypeDexCacheType* dest, const Visitor& visitor)
-      REQUIRES_SHARED(Locks::mutator_lock_);
-
-  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
-  void FixupResolvedMethodTypes(MethodTypeDexCacheType* dest, const Visitor& visitor)
-      REQUIRES_SHARED(Locks::mutator_lock_);
-
-  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
-  void FixupResolvedCallSites(GcRoot<mirror::CallSite>* dest, const Visitor& visitor)
-      REQUIRES_SHARED(Locks::mutator_lock_);
+  // Zero all array references.
+  // WARNING: This does not free the memory since it is in LinearAlloc.
+  void ResetNativeArrays() REQUIRES_SHARED(Locks::mutator_lock_);
 
   ObjPtr<String> GetLocation() REQUIRES_SHARED(Locks::mutator_lock_);
 
@@ -278,14 +263,6 @@ class MANAGED DexCache final : public Object {
   void SetResolvedString(dex::StringIndex string_idx, ObjPtr<mirror::String> resolved)
       ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_);
 
-  void SetPreResolvedString(dex::StringIndex string_idx,
-                            ObjPtr<mirror::String> resolved)
-      ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_);
-
-  // Clear the preresolved string cache to prevent further usage.
-  void ClearPreResolvedStrings()
-      ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_);
-
   // Clear a string for a string_idx, used to undo string intern transactions to make sure
   // the string isn't kept live.
   void ClearString(dex::StringIndex string_idx) REQUIRES_SHARED(Locks::mutator_lock_);
@@ -329,21 +306,10 @@ class MANAGED DexCache final : public Object {
     return GetFieldPtr64<StringDexCacheType*, kVerifyFlags>(StringsOffset());
   }
 
-  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
-  GcRoot<mirror::String>* GetPreResolvedStrings() ALWAYS_INLINE
-      REQUIRES_SHARED(Locks::mutator_lock_) {
-    return GetFieldPtr64<GcRoot<mirror::String>*, kVerifyFlags>(PreResolvedStringsOffset());
-  }
-
   void SetStrings(StringDexCacheType* strings) ALWAYS_INLINE
       REQUIRES_SHARED(Locks::mutator_lock_) {
     SetFieldPtr<false>(StringsOffset(), strings);
   }
 
-  void SetPreResolvedStrings(GcRoot<mirror::String>* strings)
-      ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
-    SetFieldPtr<false>(PreResolvedStringsOffset(), strings);
-  }
-
   template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
   TypeDexCacheType* GetResolvedTypes() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
     return GetFieldPtr<TypeDexCacheType*, kVerifyFlags>(ResolvedTypesOffset());
@@ -464,27 +430,17 @@ class MANAGED DexCache final : public Object {
   uint32_t MethodSlotIndex(uint32_t method_idx) REQUIRES_SHARED(Locks::mutator_lock_);
   uint32_t MethodTypeSlotIndex(dex::ProtoIndex proto_idx) REQUIRES_SHARED(Locks::mutator_lock_);
 
-  // Returns true if we succeeded in adding the pre-resolved string array.
-  bool AddPreResolvedStringsArray() REQUIRES_SHARED(Locks::mutator_lock_);
-
   void VisitReflectiveTargets(ReflectiveValueVisitor* visitor) REQUIRES(Locks::mutator_lock_);
 
   void SetClassLoader(ObjPtr<ClassLoader> class_loader) REQUIRES_SHARED(Locks::mutator_lock_);
 
+  ObjPtr<ClassLoader> GetClassLoader() REQUIRES_SHARED(Locks::mutator_lock_);
+
  private:
-  void SetNativeArrays(StringDexCacheType* strings,
-                       uint32_t num_strings,
-                       TypeDexCacheType* resolved_types,
-                       uint32_t num_resolved_types,
-                       MethodDexCacheType* resolved_methods,
-                       uint32_t num_resolved_methods,
-                       FieldDexCacheType* resolved_fields,
-                       uint32_t num_resolved_fields,
-                       MethodTypeDexCacheType* resolved_method_types,
-                       uint32_t num_resolved_method_types,
-                       GcRoot<CallSite>* resolved_call_sites,
-                       uint32_t num_resolved_call_sites)
-      REQUIRES_SHARED(Locks::mutator_lock_);
+  // Allocate new array in linear alloc and save it in the given fields.
+  template<typename T, size_t kMaxCacheSize>
+  T* AllocArray(MemberOffset obj_offset, MemberOffset num_offset, size_t num)
+      REQUIRES_SHARED(Locks::mutator_lock_);
 
   // std::pair<> is not trivially copyable and as such it is unsuitable for atomic operations,
   // so we use a custom pair class for loading and storing the NativeDexCachePair<>.
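
The ResetNativeArrays() comment in the header above is worth spelling out: because the arrays live in LinearAlloc, a reset can only drop the references, never free the memory. The sketch below uses an invented Arena and CacheArrays (not ART types) to show that contract under the same assumption.

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

// Hypothetical bump allocator standing in for LinearAlloc: individual
// allocations are never freed; memory is reclaimed only when the whole
// arena is discarded.
class Arena {
 public:
  explicit Arena(size_t bytes) : buffer_(bytes), used_(0) {}

  void* Alloc(size_t bytes) {
    assert(used_ + bytes <= buffer_.size());
    void* p = buffer_.data() + used_;
    used_ += bytes;
    return p;
  }

 private:
  std::vector<uint8_t> buffer_;
  size_t used_;
};

// Hypothetical cache holder illustrating the ResetNativeArrays() contract:
// resetting only zeroes the references; the arena still owns the memory.
struct CacheArrays {
  uint32_t* resolved_types = nullptr;

  void AllocateTypes(Arena* arena, size_t count) {
    resolved_types = static_cast<uint32_t*>(arena->Alloc(count * sizeof(uint32_t)));
  }

  void ResetArrays() {
    resolved_types = nullptr;  // No free here: the memory stays in the arena.
  }
};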