Diffstat (limited to 'runtime/mirror/dex_cache.h')
-rw-r--r-- | runtime/mirror/dex_cache.h | 77
1 file changed, 36 insertions, 41 deletions
diff --git a/runtime/mirror/dex_cache.h b/runtime/mirror/dex_cache.h
index 057919806f..10bb5aa01c 100644
--- a/runtime/mirror/dex_cache.h
+++ b/runtime/mirror/dex_cache.h
@@ -18,14 +18,14 @@
 #define ART_RUNTIME_MIRROR_DEX_CACHE_H_
 
 #include "array.h"
-#include "base/bit_utils.h"
+#include "art_field.h"
+#include "class.h"
 #include "dex_file_types.h"
 #include "object.h"
 #include "object_array.h"
 
 namespace art {
 
-class ArtField;
 class ArtMethod;
 struct DexCacheOffsets;
 class DexFile;
@@ -37,7 +37,6 @@ class Thread;
 namespace mirror {
 
 class CallSite;
-class Class;
 class MethodType;
 class String;
 
@@ -62,7 +61,7 @@ template <typename T> struct PACKED(8) DexCachePair {
   // it's always non-null if the id branch succeeds (except for the 0th id).
   // Set the initial state for the 0th entry to be {0,1} which is guaranteed to fail
   // the lookup id == stored id branch.
-  DexCachePair(ObjPtr<T> object, uint32_t index)
+  DexCachePair(T* object, uint32_t index)
       : object(object),
         index(index) {}
   DexCachePair() = default;
@@ -76,28 +75,39 @@ template <typename T> struct PACKED(8) DexCachePair {
     dex_cache[0].store(first_elem, std::memory_order_relaxed);
   }
 
+  static GcRoot<T> Lookup(std::atomic<DexCachePair<T>>* dex_cache,
+                          uint32_t idx,
+                          uint32_t cache_size) {
+    DCHECK_NE(cache_size, 0u);
+    DexCachePair<T> element = dex_cache[idx % cache_size].load(std::memory_order_relaxed);
+    if (idx != element.index) {
+      return GcRoot<T>(nullptr);
+    }
+
+    DCHECK(!element.object.IsNull());
+    return element.object;
+  }
+
+  static void Assign(std::atomic<DexCachePair<T>>* dex_cache,
+                     uint32_t idx,
+                     T* object,
+                     uint32_t cache_size) {
+    DCHECK_LT(idx % cache_size, cache_size);
+    dex_cache[idx % cache_size].store(
+        DexCachePair<T>(object, idx), std::memory_order_relaxed);
+  }
+
   static uint32_t InvalidIndexForSlot(uint32_t slot) {
     // Since the cache size is a power of two, 0 will always map to slot 0.
     // Use 1 for slot 0 and 0 for all other slots.
     return (slot == 0) ? 1u : 0u;
   }
-
-  T* GetObjectForIndex(uint32_t idx) REQUIRES_SHARED(Locks::mutator_lock_) {
-    if (idx != index) {
-      return nullptr;
-    }
-    DCHECK(!object.IsNull());
-    return object.Read();
-  }
 };
 
-using TypeDexCachePair = DexCachePair<Class>;
-using TypeDexCacheType = std::atomic<TypeDexCachePair>;
-
-using StringDexCachePair = DexCachePair<String>;
+using StringDexCachePair = DexCachePair<mirror::String>;
 using StringDexCacheType = std::atomic<StringDexCachePair>;
 
-using MethodTypeDexCachePair = DexCachePair<MethodType>;
+using MethodTypeDexCachePair = DexCachePair<mirror::MethodType>;
 using MethodTypeDexCacheType = std::atomic<MethodTypeDexCachePair>;
 
 // C++ mirror of java.lang.DexCache.
@@ -106,11 +116,6 @@ class MANAGED DexCache FINAL : public Object {
   // Size of java.lang.DexCache.class.
   static uint32_t ClassSize(PointerSize pointer_size);
 
-  // Size of type dex cache. Needs to be a power of 2 for entrypoint assumptions to hold.
-  static constexpr size_t kDexCacheTypeCacheSize = 1024;
-  static_assert(IsPowerOfTwo(kDexCacheTypeCacheSize),
-                "Type dex cache size is not a power of 2.");
-
   // Size of string dex cache. Needs to be a power of 2 for entrypoint assumptions to hold.
   static constexpr size_t kDexCacheStringCacheSize = 1024;
   static_assert(IsPowerOfTwo(kDexCacheStringCacheSize),
@@ -122,10 +127,6 @@ class MANAGED DexCache FINAL : public Object {
   static_assert(IsPowerOfTwo(kDexCacheMethodTypeCacheSize),
                 "MethodType dex cache size is not a power of 2.");
 
-  static constexpr size_t StaticTypeSize() {
-    return kDexCacheTypeCacheSize;
-  }
-
   static constexpr size_t StaticStringSize() {
     return kDexCacheStringCacheSize;
   }
@@ -156,7 +157,7 @@ class MANAGED DexCache FINAL : public Object {
       REQUIRES_SHARED(Locks::mutator_lock_);
 
   template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
-  void FixupResolvedTypes(TypeDexCacheType* dest, const Visitor& visitor)
+  void FixupResolvedTypes(GcRoot<mirror::Class>* dest, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_);
 
   template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
@@ -223,7 +224,7 @@ class MANAGED DexCache FINAL : public Object {
     return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_call_sites_);
   }
 
-  String* GetResolvedString(dex::StringIndex string_idx) ALWAYS_INLINE
+  mirror::String* GetResolvedString(dex::StringIndex string_idx) ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_);
 
   void SetResolvedString(dex::StringIndex string_idx, ObjPtr<mirror::String> resolved) ALWAYS_INLINE
@@ -238,8 +239,6 @@ class MANAGED DexCache FINAL : public Object {
   void SetResolvedType(dex::TypeIndex type_idx, ObjPtr<Class> resolved)
      REQUIRES_SHARED(Locks::mutator_lock_);
 
-  void ClearResolvedType(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);
-
   ALWAYS_INLINE ArtMethod* GetResolvedMethod(uint32_t method_idx, PointerSize ptr_size)
      REQUIRES_SHARED(Locks::mutator_lock_);
 
@@ -279,11 +278,11 @@ class MANAGED DexCache FINAL : public Object {
     SetFieldPtr<false>(StringsOffset(), strings);
   }
 
-  TypeDexCacheType* GetResolvedTypes() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
-    return GetFieldPtr<TypeDexCacheType*>(ResolvedTypesOffset());
+  GcRoot<Class>* GetResolvedTypes() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
+    return GetFieldPtr<GcRoot<Class>*>(ResolvedTypesOffset());
   }
 
-  void SetResolvedTypes(TypeDexCacheType* resolved_types)
+  void SetResolvedTypes(GcRoot<Class>* resolved_types)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
     SetFieldPtr<false>(ResolvedTypesOffset(), resolved_types);
@@ -364,7 +363,7 @@ class MANAGED DexCache FINAL : public Object {
     SetFieldPtr<false>(OFFSET_OF_OBJECT_MEMBER(DexCache, dex_file_), dex_file);
   }
 
-  void SetLocation(ObjPtr<String> location) REQUIRES_SHARED(Locks::mutator_lock_);
+  void SetLocation(ObjPtr<mirror::String> location) REQUIRES_SHARED(Locks::mutator_lock_);
 
   // NOTE: Get/SetElementPtrSize() are intended for working with ArtMethod** and ArtField**
   // provided by GetResolvedMethods/Fields() and ArtMethod::GetDexCacheResolvedMethods(),
@@ -381,7 +380,7 @@ class MANAGED DexCache FINAL : public Object {
             ObjPtr<String> location,
             StringDexCacheType* strings,
             uint32_t num_strings,
-            TypeDexCacheType* resolved_types,
+            GcRoot<Class>* resolved_types,
             uint32_t num_resolved_types,
             ArtMethod** resolved_methods,
             uint32_t num_resolved_methods,
@@ -394,16 +393,12 @@ class MANAGED DexCache FINAL : public Object {
             PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);
 
-  uint32_t StringSlotIndex(dex::StringIndex string_idx) REQUIRES_SHARED(Locks::mutator_lock_);
-  uint32_t TypeSlotIndex(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);
-  uint32_t MethodTypeSlotIndex(uint32_t proto_idx) REQUIRES_SHARED(Locks::mutator_lock_);
-
   // Visit instance fields of the dex cache as well as its associated arrays.
   template <bool kVisitNativeRoots,
             VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
             ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
             typename Visitor>
-  void VisitReferences(ObjPtr<Class> klass, const Visitor& visitor)
+  void VisitReferences(ObjPtr<mirror::Class> klass, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_);
 
   HeapReference<Object> dex_;
@@ -415,7 +410,7 @@ class MANAGED DexCache FINAL : public Object {
   uint64_t resolved_method_types_;  // std::atomic<MethodTypeDexCachePair>* array with
                                     // num_resolved_method_types_ elements.
   uint64_t resolved_methods_;       // ArtMethod*, array with num_resolved_methods_ elements.
-  uint64_t resolved_types_;         // TypeDexCacheType*, array with num_resolved_types_ elements.
+  uint64_t resolved_types_;         // GcRoot<Class>*, array with num_resolved_types_ elements.
   uint64_t strings_;                // std::atomic<StringDexCachePair>*, array with num_strings_
                                     // elements.
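
The static DexCachePair<T>::Lookup()/Assign() helpers added above treat each backing array as a small direct-mapped cache: the slot is idx % cache_size (the cache sizes are powers of two), the stored index is the tag, and a lookup hits only when the full index matches, while InvalidIndexForSlot() seeds slot 0 so a zero-filled array cannot report a spurious hit for index 0. Below is a minimal standalone sketch of that scheme, not ART code: a plain uint32_t handle stands in for GcRoot<T>'s compressed reference, the DCHECKs, read barriers, and lock annotations are omitted, and the names CachePair, Handle, Initialize, and kCacheSize are illustrative only.

#include <atomic>
#include <cassert>
#include <cstdint>
#include <iostream>

// Stand-in for GcRoot<T>: a 32-bit handle where 0 means "null", so each pair packs
// into 8 bytes (like PACKED(8) DexCachePair) and std::atomic<CachePair> stays
// lock-free on common targets.
using Handle = uint32_t;

struct CachePair {
  Handle object = 0;   // cached value (0 == null handle)
  uint32_t index = 0;  // tag: the full index that was cached into this slot

  CachePair() = default;
  CachePair(Handle object_in, uint32_t index_in) : object(object_in), index(index_in) {}

  // A zero-filled slot 0 would spuriously "hit" for index 0, so slot 0 is seeded with
  // index 1; every other slot keeps 0, which can never match an index that maps there.
  static uint32_t InvalidIndexForSlot(uint32_t slot) { return (slot == 0) ? 1u : 0u; }

  static void Initialize(std::atomic<CachePair>* cache, uint32_t cache_size) {
    for (uint32_t slot = 0; slot != cache_size; ++slot) {
      cache[slot].store(CachePair(/*object=*/0, InvalidIndexForSlot(slot)),
                        std::memory_order_relaxed);
    }
  }

  // Direct-mapped lookup: slot = idx % cache_size; hit only if the stored tag matches.
  static Handle Lookup(std::atomic<CachePair>* cache, uint32_t idx, uint32_t cache_size) {
    CachePair element = cache[idx % cache_size].load(std::memory_order_relaxed);
    return (element.index == idx) ? element.object : 0u;
  }

  // Assign unconditionally overwrites whatever index previously occupied the slot.
  static void Assign(std::atomic<CachePair>* cache, uint32_t idx, Handle object,
                     uint32_t cache_size) {
    cache[idx % cache_size].store(CachePair(object, idx), std::memory_order_relaxed);
  }
};

int main() {
  constexpr uint32_t kCacheSize = 8;  // Power of two, like kDexCacheStringCacheSize (1024).
  std::atomic<CachePair> cache[kCacheSize];
  CachePair::Initialize(cache, kCacheSize);

  CachePair::Assign(cache, /*idx=*/42, /*object=*/0xCAFE, kCacheSize);

  // 42 and 50 both map to slot 2 (42 % 8 == 50 % 8 == 2); only the cached index hits.
  assert(CachePair::Lookup(cache, 42, kCacheSize) == 0xCAFE);
  assert(CachePair::Lookup(cache, 50, kCacheSize) == 0u);

  // Caching index 50 evicts index 42 from the shared slot.
  CachePair::Assign(cache, /*idx=*/50, /*object=*/0xBEEF, kCacheSize);
  assert(CachePair::Lookup(cache, 42, kCacheSize) == 0u);

  std::cout << "direct-mapped pair cache works as described\n";
  return 0;
}

The relaxed memory order in the sketch mirrors the header above: each 8-byte pair is loaded and stored as a single unit, so a racing overwrite of a shared slot can only turn a hit into a miss (forcing a re-resolution), never yield a torn entry.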