Diffstat (limited to 'runtime/mirror/dex_cache.h')
-rw-r--r--  runtime/mirror/dex_cache.h | 77
1 file changed, 41 insertions, 36 deletions
diff --git a/runtime/mirror/dex_cache.h b/runtime/mirror/dex_cache.h
index 6f88cc5df4..e68b0c7219 100644
--- a/runtime/mirror/dex_cache.h
+++ b/runtime/mirror/dex_cache.h
@@ -18,14 +18,14 @@
 #define ART_RUNTIME_MIRROR_DEX_CACHE_H_
 
 #include "array.h"
-#include "art_field.h"
-#include "class.h"
+#include "base/bit_utils.h"
 #include "dex_file_types.h"
 #include "object.h"
 #include "object_array.h"
 
 namespace art {
 
+class ArtField;
 class ArtMethod;
 struct DexCacheOffsets;
 class DexFile;
@@ -36,6 +36,7 @@
 class Thread;
 
 namespace mirror {
 
+class Class;
 class MethodType;
 class String;
@@ -60,7 +61,7 @@ template <typename T> struct PACKED(8) DexCachePair {
   // it's always non-null if the id branch succeeds (except for the 0th id).
   // Set the initial state for the 0th entry to be {0,1} which is guaranteed to fail
   // the lookup id == stored id branch.
-  DexCachePair(T* object, uint32_t index)
+  DexCachePair(ObjPtr<T> object, uint32_t index)
       : object(object),
         index(index) {}
   DexCachePair() = default;
@@ -74,39 +75,28 @@ template <typename T> struct PACKED(8) DexCachePair {
     dex_cache[0].store(first_elem, std::memory_order_relaxed);
   }
 
-  static GcRoot<T> Lookup(std::atomic<DexCachePair<T>>* dex_cache,
-                          uint32_t idx,
-                          uint32_t cache_size) {
-    DCHECK_NE(cache_size, 0u);
-    DexCachePair<T> element = dex_cache[idx % cache_size].load(std::memory_order_relaxed);
-    if (idx != element.index) {
-      return GcRoot<T>(nullptr);
-    }
-
-    DCHECK(!element.object.IsNull());
-    return element.object;
-  }
-
-  static void Assign(std::atomic<DexCachePair<T>>* dex_cache,
-                     uint32_t idx,
-                     T* object,
-                     uint32_t cache_size) {
-    DCHECK_LT(idx % cache_size, cache_size);
-    dex_cache[idx % cache_size].store(
-        DexCachePair<T>(object, idx), std::memory_order_relaxed);
-  }
-
   static uint32_t InvalidIndexForSlot(uint32_t slot) {
     // Since the cache size is a power of two, 0 will always map to slot 0.
     // Use 1 for slot 0 and 0 for all other slots.
     return (slot == 0) ? 1u : 0u;
   }
+
+  T* GetObjectForIndex(uint32_t idx) REQUIRES_SHARED(Locks::mutator_lock_) {
+    if (idx != index) {
+      return nullptr;
+    }
+    DCHECK(!object.IsNull());
+    return object.Read();
+  }
 };
 
-using StringDexCachePair = DexCachePair<mirror::String>;
+using TypeDexCachePair = DexCachePair<Class>;
+using TypeDexCacheType = std::atomic<TypeDexCachePair>;
+
+using StringDexCachePair = DexCachePair<String>;
 using StringDexCacheType = std::atomic<StringDexCachePair>;
 
-using MethodTypeDexCachePair = DexCachePair<mirror::MethodType>;
+using MethodTypeDexCachePair = DexCachePair<MethodType>;
 using MethodTypeDexCacheType = std::atomic<MethodTypeDexCachePair>;
 
 // C++ mirror of java.lang.DexCache.
@@ -115,6 +105,11 @@ class MANAGED DexCache FINAL : public Object {
   // Size of java.lang.DexCache.class.
   static uint32_t ClassSize(PointerSize pointer_size);
 
+  // Size of type dex cache. Needs to be a power of 2 for entrypoint assumptions to hold.
+  static constexpr size_t kDexCacheTypeCacheSize = 1024;
+  static_assert(IsPowerOfTwo(kDexCacheTypeCacheSize),
+                "Type dex cache size is not a power of 2.");
+
   // Size of string dex cache. Needs to be a power of 2 for entrypoint assumptions to hold.
   static constexpr size_t kDexCacheStringCacheSize = 1024;
   static_assert(IsPowerOfTwo(kDexCacheStringCacheSize),
@@ -126,6 +121,10 @@ class MANAGED DexCache FINAL : public Object {
   static_assert(IsPowerOfTwo(kDexCacheMethodTypeCacheSize),
                 "MethodType dex cache size is not a power of 2.");
 
+  static constexpr size_t StaticTypeSize() {
+    return kDexCacheTypeCacheSize;
+  }
+
   static constexpr size_t StaticStringSize() {
     return kDexCacheStringCacheSize;
   }
@@ -156,7 +155,7 @@ class MANAGED DexCache FINAL : public Object {
       REQUIRES_SHARED(Locks::mutator_lock_);
 
   template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
-  void FixupResolvedTypes(GcRoot<mirror::Class>* dest, const Visitor& visitor)
+  void FixupResolvedTypes(TypeDexCacheType* dest, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_);
 
   template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
@@ -211,7 +210,7 @@ class MANAGED DexCache FINAL : public Object {
     return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_method_types_);
   }
 
-  mirror::String* GetResolvedString(dex::StringIndex string_idx) ALWAYS_INLINE
+  String* GetResolvedString(dex::StringIndex string_idx) ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_);
 
   void SetResolvedString(dex::StringIndex string_idx, ObjPtr<mirror::String> resolved) ALWAYS_INLINE
@@ -226,6 +225,8 @@ class MANAGED DexCache FINAL : public Object {
   void SetResolvedType(dex::TypeIndex type_idx, ObjPtr<Class> resolved)
      REQUIRES_SHARED(Locks::mutator_lock_);
 
+  void ClearResolvedType(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);
+
   ALWAYS_INLINE ArtMethod* GetResolvedMethod(uint32_t method_idx, PointerSize ptr_size)
      REQUIRES_SHARED(Locks::mutator_lock_);
 
@@ -254,11 +255,11 @@ class MANAGED DexCache FINAL : public Object {
     SetFieldPtr<false>(StringsOffset(), strings);
   }
 
-  GcRoot<Class>* GetResolvedTypes() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
-    return GetFieldPtr<GcRoot<Class>*>(ResolvedTypesOffset());
+  TypeDexCacheType* GetResolvedTypes() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
+    return GetFieldPtr<TypeDexCacheType*>(ResolvedTypesOffset());
   }
 
-  void SetResolvedTypes(GcRoot<Class>* resolved_types)
+  void SetResolvedTypes(TypeDexCacheType* resolved_types)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
     SetFieldPtr<false>(ResolvedTypesOffset(), resolved_types);
@@ -323,7 +324,7 @@ class MANAGED DexCache FINAL : public Object {
     SetFieldPtr<false>(OFFSET_OF_OBJECT_MEMBER(DexCache, dex_file_), dex_file);
   }
 
-  void SetLocation(ObjPtr<mirror::String> location) REQUIRES_SHARED(Locks::mutator_lock_);
+  void SetLocation(ObjPtr<String> location) REQUIRES_SHARED(Locks::mutator_lock_);
 
   // NOTE: Get/SetElementPtrSize() are intended for working with ArtMethod** and ArtField**
   // provided by GetResolvedMethods/Fields() and ArtMethod::GetDexCacheResolvedMethods(),
@@ -340,7 +341,7 @@ class MANAGED DexCache FINAL : public Object {
             ObjPtr<String> location,
             StringDexCacheType* strings,
             uint32_t num_strings,
-            GcRoot<Class>* resolved_types,
+            TypeDexCacheType* resolved_types,
             uint32_t num_resolved_types,
             ArtMethod** resolved_methods,
             uint32_t num_resolved_methods,
@@ -351,12 +352,16 @@ class MANAGED DexCache FINAL : public Object {
             PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);
 
+  uint32_t StringSlotIndex(dex::StringIndex string_idx) REQUIRES_SHARED(Locks::mutator_lock_);
+  uint32_t TypeSlotIndex(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);
+  uint32_t MethodTypeSlotIndex(uint32_t proto_idx) REQUIRES_SHARED(Locks::mutator_lock_);
+
   // Visit instance fields of the dex cache as well as its associated arrays.
   template <bool kVisitNativeRoots,
            VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
            ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
            typename Visitor>
-  void VisitReferences(ObjPtr<mirror::Class> klass, const Visitor& visitor)
+  void VisitReferences(ObjPtr<Class> klass, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_);
 
   HeapReference<Object> dex_;
@@ -366,7 +371,7 @@ class MANAGED DexCache FINAL : public Object {
   uint64_t resolved_method_types_;   // std::atomic<MethodTypeDexCachePair>* array with
                                      // num_resolved_method_types_ elements.
   uint64_t resolved_methods_;        // ArtMethod*, array with num_resolved_methods_ elements.
-  uint64_t resolved_types_;          // GcRoot<Class>*, array with num_resolved_types_ elements.
+  uint64_t resolved_types_;          // TypeDexCacheType*, array with num_resolved_types_ elements.
   uint64_t strings_;                 // std::atomic<StringDexCachePair>*, array with num_strings_
                                      // elements.
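
The pattern this change extends from strings and method types to resolved types is a fixed-size, power-of-two array of {object, index} pairs: a dex index maps to a slot by modulo (a mask in practice), the slot only counts as a hit when the stored index matches the requested one, and slot 0 is pre-poisoned with index 1 so that dex index 0 cannot produce a false hit before the first store. The sketch below is a minimal standalone approximation of that scheme, not ART code: plain pointers stand in for GcRoot<T>/ObjPtr<T>, there are no read barriers or mutator-lock annotations, and the names SimpleDexCachePair/SimpleDexCache are made up for illustration.

// Minimal standalone sketch (not ART code) of the DexCachePair scheme above:
// a fixed-size, power-of-two cache of {object, index} pairs with relaxed atomics.
// NOTE: the pair is 16 bytes here, so GCC/Clang may need linking with -latomic.
#include <atomic>
#include <cstddef>
#include <cstdint>
#include <cstdio>

template <typename T>
struct SimpleDexCachePair {  // hypothetical stand-in for DexCachePair<T>
  T* object;
  uint32_t index;

  SimpleDexCachePair() : object(nullptr), index(0) {}
  SimpleDexCachePair(T* obj, uint32_t idx) : object(obj), index(idx) {}

  // Mirrors InvalidIndexForSlot(): index 0 always maps to slot 0, so slot 0 is
  // poisoned with index 1 and every other slot with index 0.
  static uint32_t InvalidIndexForSlot(uint32_t slot) { return (slot == 0) ? 1u : 0u; }

  // Mirrors GetObjectForIndex(): a slot is a hit only if the stored index matches.
  T* GetObjectForIndex(uint32_t idx) const { return (idx == index) ? object : nullptr; }
};

template <typename T, size_t kSize>  // kSize must be a power of two
class SimpleDexCache {  // hypothetical stand-in for one DexCache array
  static_assert((kSize & (kSize - 1)) == 0, "cache size must be a power of 2");
  std::atomic<SimpleDexCachePair<T>> slots_[kSize];

 public:
  SimpleDexCache() {
    for (size_t s = 0; s < kSize; ++s) {
      Clear(static_cast<uint32_t>(s));  // put every slot into its "cannot match" state
    }
  }
  // cf. StringSlotIndex()/TypeSlotIndex()/MethodTypeSlotIndex() in the header.
  static uint32_t SlotIndex(uint32_t idx) { return static_cast<uint32_t>(idx % kSize); }
  T* Get(uint32_t idx) {
    return slots_[SlotIndex(idx)].load(std::memory_order_relaxed).GetObjectForIndex(idx);
  }
  void Set(uint32_t idx, T* object) {
    slots_[SlotIndex(idx)].store(SimpleDexCachePair<T>(object, idx), std::memory_order_relaxed);
  }
  // cf. ClearResolvedType(): reset the slot so no index can match it.
  void Clear(uint32_t idx) {
    uint32_t slot = SlotIndex(idx);
    slots_[slot].store(
        SimpleDexCachePair<T>(nullptr, SimpleDexCachePair<T>::InvalidIndexForSlot(slot)),
        std::memory_order_relaxed);
  }
};

int main() {
  SimpleDexCache<int, 8> cache;  // 8 slots; indices 3 and 11 collide (11 % 8 == 3)
  int a = 1, b = 2;
  cache.Set(3, &a);
  std::printf("idx 3: %p  idx 11: %p\n", (void*)cache.Get(3), (void*)cache.Get(11));
  cache.Set(11, &b);  // the colliding store simply evicts the entry for index 3
  std::printf("idx 3: %p  idx 11: %p\n", (void*)cache.Get(3), (void*)cache.Get(11));
  cache.Clear(11);
  std::printf("after Clear(11), idx 11: %p\n", (void*)cache.Get(11));
  return 0;
}

A colliding store simply evicts the previous entry, so the cache never resizes and needs no synchronization beyond relaxed loads and stores of whole pairs; the power-of-two size is what lets compiled entrypoints compute the slot with a mask, which is why the header asserts IsPowerOfTwo for every cache size.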