Refactor and clean up DexCache.

- Introduce macros to avoid duplicating code for each dex cache kind;
  all kinds now share the pair-cache scheme sketched below.
- Remove preResolvedStrings; it was unused.
- Remove the dex cache length fields; they can easily be inferred from
  the dex file and the fixed cache sizes.
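
The new DexCachePairArray / NativeDexCachePairArray accessors wrap a
direct-mapped cache: each slot holds an (object, dex index) pair at
slot index % size, and a lookup hits only when the stored index still
matches. Below is a minimal standalone sketch of that scheme, simplified
and non-atomic (the names here are invented for illustration; the real
arrays store std::atomic pairs and GC roots):

  #include <array>
  #include <cstddef>
  #include <cstdint>
  #include <utility>

  template <typename T, std::size_t kSize>
  class PairCache {
   public:
    // A lookup hits only if the slot still holds this exact index.
    T* Get(uint32_t index) const {
      const std::pair<T*, uint32_t>& pair = entries_[index % kSize];
      return pair.second == index ? pair.first : nullptr;
    }
    // Overwrites whatever index previously mapped to the same slot.
    void Set(uint32_t index, T* value) {
      entries_[index % kSize] = {value, index};
    }
   private:
    // Each slot stores (object, dex index); a mismatch means "not cached".
    std::array<std::pair<T*, uint32_t>, kSize> entries_{};
  };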

Test: test.py
Change-Id: I1e0bc8cf078ce8e09c4d756c63be32cb344fcce1
diff --git a/dex2oat/driver/compiler_driver_test.cc b/dex2oat/driver/compiler_driver_test.cc
index 7e3f40c..759426a 100644
--- a/dex2oat/driver/compiler_driver_test.cc
+++ b/dex2oat/driver/compiler_driver_test.cc
@@ -130,19 +130,15 @@
   ASSERT_TRUE(java_lang_dex_file_ != nullptr);
   const DexFile& dex = *java_lang_dex_file_;
   ObjPtr<mirror::DexCache> dex_cache = class_linker_->FindDexCache(soa.Self(), dex);
-  EXPECT_EQ(dex.NumStringIds(), dex_cache->NumStrings());
   for (size_t i = 0; i < dex_cache->NumStrings(); i++) {
     const ObjPtr<mirror::String> string = dex_cache->GetResolvedString(dex::StringIndex(i));
     EXPECT_TRUE(string != nullptr) << "string_idx=" << i;
   }
-  EXPECT_EQ(dex.NumTypeIds(), dex_cache->NumResolvedTypes());
   for (size_t i = 0; i < dex_cache->NumResolvedTypes(); i++) {
     const ObjPtr<mirror::Class> type = dex_cache->GetResolvedType(dex::TypeIndex(i));
     EXPECT_TRUE(type != nullptr)
         << "type_idx=" << i << " " << dex.GetTypeDescriptor(dex.GetTypeId(dex::TypeIndex(i)));
   }
-  EXPECT_TRUE(dex_cache->StaticMethodSize() == dex_cache->NumResolvedMethods()
-      || dex.NumMethodIds() ==  dex_cache->NumResolvedMethods());
   for (size_t i = 0; i < dex_cache->NumResolvedMethods(); i++) {
     // FIXME: This is outdated for hash-based method array.
     ArtMethod* method = dex_cache->GetResolvedMethod(i);
@@ -153,8 +149,6 @@
         << " " << dex.GetMethodDeclaringClassDescriptor(dex.GetMethodId(i)) << " "
         << dex.GetMethodName(dex.GetMethodId(i));
   }
-  EXPECT_TRUE(dex_cache->StaticArtFieldSize() == dex_cache->NumResolvedFields()
-      || dex.NumFieldIds() ==  dex_cache->NumResolvedFields());
   for (size_t i = 0; i < dex_cache->NumResolvedFields(); i++) {
     // FIXME: This is outdated for hash-based field array.
     ArtField* field = dex_cache->GetResolvedField(i);
diff --git a/runtime/class_linker_test.cc b/runtime/class_linker_test.cc
index 666f86e..a6cf10c 100644
--- a/runtime/class_linker_test.cc
+++ b/runtime/class_linker_test.cc
@@ -677,14 +677,6 @@
     addOffset(OFFSETOF_MEMBER(mirror::DexCache, class_loader_), "classLoader");
     addOffset(OFFSETOF_MEMBER(mirror::DexCache, dex_file_), "dexFile");
     addOffset(OFFSETOF_MEMBER(mirror::DexCache, location_), "location");
-    addOffset(OFFSETOF_MEMBER(mirror::DexCache, num_preresolved_strings_), "numPreResolvedStrings");
-    addOffset(OFFSETOF_MEMBER(mirror::DexCache, num_resolved_call_sites_), "numResolvedCallSites");
-    addOffset(OFFSETOF_MEMBER(mirror::DexCache, num_resolved_fields_), "numResolvedFields");
-    addOffset(OFFSETOF_MEMBER(mirror::DexCache, num_resolved_method_types_), "numResolvedMethodTypes");
-    addOffset(OFFSETOF_MEMBER(mirror::DexCache, num_resolved_methods_), "numResolvedMethods");
-    addOffset(OFFSETOF_MEMBER(mirror::DexCache, num_resolved_types_), "numResolvedTypes");
-    addOffset(OFFSETOF_MEMBER(mirror::DexCache, num_strings_), "numStrings");
-    addOffset(OFFSETOF_MEMBER(mirror::DexCache, preresolved_strings_), "preResolvedStrings");
     addOffset(OFFSETOF_MEMBER(mirror::DexCache, resolved_call_sites_), "resolvedCallSites");
     addOffset(OFFSETOF_MEMBER(mirror::DexCache, resolved_fields_), "resolvedFields");
     addOffset(OFFSETOF_MEMBER(mirror::DexCache, resolved_method_types_), "resolvedMethodTypes");
diff --git a/runtime/mirror/dex_cache-inl.h b/runtime/mirror/dex_cache-inl.h
index 402bb72..3cbfb40 100644
--- a/runtime/mirror/dex_cache-inl.h
+++ b/runtime/mirror/dex_cache-inl.h
@@ -54,10 +54,7 @@
 }
 
 template<typename T, size_t kMaxCacheSize>
-T* DexCache::AllocArray(MemberOffset obj_offset,
-                        MemberOffset num_offset,
-                        size_t num,
-                        LinearAllocKind kind) {
+T* DexCache::AllocArray(MemberOffset obj_offset, size_t num, LinearAllocKind kind) {
   num = std::min<size_t>(num, kMaxCacheSize);
   if (num == 0) {
     return nullptr;
@@ -79,7 +76,6 @@
   }
   array = reinterpret_cast<T*>(alloc->AllocAlign16(self, RoundUp(num * sizeof(T), 16), kind));
   InitializeArray(array);  // Ensure other threads see the array initialized.
-  dex_cache->SetField32Volatile<false, false>(num_offset, num);
   dex_cache->SetField64Volatile<false, false>(obj_offset, reinterpret_cast64<uint64_t>(array));
   return array;
 }
@@ -89,14 +85,6 @@
     : object(object), index(index) {}
 
 template <typename T>
-inline void DexCachePair<T>::Initialize(std::atomic<DexCachePair<T>>* dex_cache) {
-  DexCachePair<T> first_elem;
-  first_elem.object = GcRoot<T>(nullptr);
-  first_elem.index = InvalidIndexForSlot(0);
-  dex_cache[0].store(first_elem, std::memory_order_relaxed);
-}
-
-template <typename T>
 inline T* DexCachePair<T>::GetObjectForIndex(uint32_t idx) {
   if (idx != index) {
     return nullptr;
@@ -106,11 +94,22 @@
 }
 
 template <typename T>
+inline void DexCachePair<T>::Initialize(std::atomic<DexCachePair<T>>* dex_cache) {
+  DexCachePair<T> first_elem;
+  first_elem.object = GcRoot<T>(nullptr);
+  first_elem.index = InvalidIndexForSlot(0);
+  dex_cache[0].store(first_elem, std::memory_order_relaxed);
+}
+
+template <typename T>
 inline void NativeDexCachePair<T>::Initialize(std::atomic<NativeDexCachePair<T>>* dex_cache) {
   NativeDexCachePair<T> first_elem;
   first_elem.object = nullptr;
   first_elem.index = InvalidIndexForSlot(0);
-  DexCache::SetNativePair(dex_cache, 0, first_elem);
+
+  auto* array = reinterpret_cast<std::atomic<AtomicPair<uintptr_t>>*>(dex_cache);
+  AtomicPair<uintptr_t> v(reinterpret_cast<size_t>(first_elem.object), first_elem.index);
+  AtomicPairStoreRelease(&array[0], v);
 }
 
 inline uint32_t DexCache::ClassSize(PointerSize pointer_size) {
@@ -118,34 +117,21 @@
   return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0, pointer_size);
 }
 
-inline uint32_t DexCache::StringSlotIndex(dex::StringIndex string_idx) {
-  DCHECK_LT(string_idx.index_, GetDexFile()->NumStringIds());
-  const uint32_t slot_idx = string_idx.index_ % kDexCacheStringCacheSize;
-  DCHECK_LT(slot_idx, NumStrings());
-  return slot_idx;
-}
-
 inline String* DexCache::GetResolvedString(dex::StringIndex string_idx) {
-  StringDexCacheType* strings = GetStrings();
+  auto* strings = GetStrings();
   if (UNLIKELY(strings == nullptr)) {
     return nullptr;
   }
-  return strings[StringSlotIndex(string_idx)].load(
-      std::memory_order_relaxed).GetObjectForIndex(string_idx.index_);
+  return strings->Get(string_idx.index_);
 }
 
 inline void DexCache::SetResolvedString(dex::StringIndex string_idx, ObjPtr<String> resolved) {
   DCHECK(resolved != nullptr);
-  StringDexCacheType* strings = GetStrings();
+  auto* strings = GetStrings();
   if (UNLIKELY(strings == nullptr)) {
-    strings = AllocArray<StringDexCacheType, kDexCacheStringCacheSize>(
-        StringsOffset(),
-        NumStringsOffset(),
-        GetDexFile()->NumStringIds(),
-        LinearAllocKind::kDexCacheArray);
+    strings = AllocateStrings();
   }
-  strings[StringSlotIndex(string_idx)].store(
-      StringDexCachePair(resolved, string_idx.index_), std::memory_order_relaxed);
+  strings->Set(string_idx.index_, resolved.Ptr());
   Runtime* const runtime = Runtime::Current();
   if (UNLIKELY(runtime->IsActiveTransaction())) {
     DCHECK(runtime->IsAotCompiler());
@@ -157,102 +143,63 @@
 
 inline void DexCache::ClearString(dex::StringIndex string_idx) {
   DCHECK(Runtime::Current()->IsAotCompiler());
-  uint32_t slot_idx = StringSlotIndex(string_idx);
-  StringDexCacheType* strings = GetStrings();
+  auto* strings = GetStrings();
   if (UNLIKELY(strings == nullptr)) {
     return;
   }
-  StringDexCacheType* slot = &strings[slot_idx];
-  // This is racy but should only be called from the transactional interpreter.
-  if (slot->load(std::memory_order_relaxed).index == string_idx.index_) {
-    StringDexCachePair cleared(nullptr, StringDexCachePair::InvalidIndexForSlot(slot_idx));
-    slot->store(cleared, std::memory_order_relaxed);
-  }
-}
-
-inline uint32_t DexCache::TypeSlotIndex(dex::TypeIndex type_idx) {
-  DCHECK_LT(type_idx.index_, GetDexFile()->NumTypeIds());
-  const uint32_t slot_idx = type_idx.index_ % kDexCacheTypeCacheSize;
-  DCHECK_LT(slot_idx, NumResolvedTypes());
-  return slot_idx;
+  strings->Clear(string_idx.index_);
 }
 
 inline Class* DexCache::GetResolvedType(dex::TypeIndex type_idx) {
   // It is theorized that a load acquire is not required since obtaining the resolved class will
   // always have an address dependency or a lock.
-  TypeDexCacheType* resolved_types = GetResolvedTypes();
+  auto* resolved_types = GetResolvedTypes();
   if (UNLIKELY(resolved_types == nullptr)) {
     return nullptr;
   }
-  return resolved_types[TypeSlotIndex(type_idx)].load(
-      std::memory_order_relaxed).GetObjectForIndex(type_idx.index_);
+  return resolved_types->Get(type_idx.index_);
 }
 
 inline void DexCache::SetResolvedType(dex::TypeIndex type_idx, ObjPtr<Class> resolved) {
   DCHECK(resolved != nullptr);
   DCHECK(resolved->IsResolved()) << resolved->GetStatus();
-  TypeDexCacheType* resolved_types = GetResolvedTypes();
+  auto* resolved_types = GetResolvedTypes();
   if (UNLIKELY(resolved_types == nullptr)) {
-    resolved_types = AllocArray<TypeDexCacheType, kDexCacheTypeCacheSize>(
-        ResolvedTypesOffset(),
-        NumResolvedTypesOffset(),
-        GetDexFile()->NumTypeIds(),
-        LinearAllocKind::kDexCacheArray);
+    resolved_types = AllocateResolvedTypes();
   }
   // TODO default transaction support.
   // Use a release store for SetResolvedType. This is done to prevent other threads from seeing a
   // class but not necessarily seeing the loaded members like the static fields array.
   // See b/32075261.
-  resolved_types[TypeSlotIndex(type_idx)].store(
-      TypeDexCachePair(resolved, type_idx.index_), std::memory_order_release);
+  resolved_types->Set(type_idx.index_, resolved.Ptr());
   // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
   WriteBarrier::ForEveryFieldWrite(this);
 }
 
 inline void DexCache::ClearResolvedType(dex::TypeIndex type_idx) {
   DCHECK(Runtime::Current()->IsAotCompiler());
-  TypeDexCacheType* resolved_types = GetResolvedTypes();
+  auto* resolved_types = GetResolvedTypes();
   if (UNLIKELY(resolved_types == nullptr)) {
     return;
   }
-  uint32_t slot_idx = TypeSlotIndex(type_idx);
-  TypeDexCacheType* slot = &resolved_types[slot_idx];
-  // This is racy but should only be called from the single-threaded ImageWriter and tests.
-  if (slot->load(std::memory_order_relaxed).index == type_idx.index_) {
-    TypeDexCachePair cleared(nullptr, TypeDexCachePair::InvalidIndexForSlot(slot_idx));
-    slot->store(cleared, std::memory_order_relaxed);
-  }
-}
-
-inline uint32_t DexCache::MethodTypeSlotIndex(dex::ProtoIndex proto_idx) {
-  DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
-  DCHECK_LT(proto_idx.index_, GetDexFile()->NumProtoIds());
-  const uint32_t slot_idx = proto_idx.index_ % kDexCacheMethodTypeCacheSize;
-  DCHECK_LT(slot_idx, NumResolvedMethodTypes());
-  return slot_idx;
+  resolved_types->Clear(type_idx.index_);
 }
 
 inline MethodType* DexCache::GetResolvedMethodType(dex::ProtoIndex proto_idx) {
-  MethodTypeDexCacheType* methods = GetResolvedMethodTypes();
+  auto* methods = GetResolvedMethodTypes();
   if (UNLIKELY(methods == nullptr)) {
     return nullptr;
   }
-  return methods[MethodTypeSlotIndex(proto_idx)].load(
-      std::memory_order_relaxed).GetObjectForIndex(proto_idx.index_);
+  return methods->Get(proto_idx.index_);
 }
 
 inline void DexCache::SetResolvedMethodType(dex::ProtoIndex proto_idx, MethodType* resolved) {
   DCHECK(resolved != nullptr);
-  MethodTypeDexCacheType* methods = GetResolvedMethodTypes();
+  auto* methods = GetResolvedMethodTypes();
   if (UNLIKELY(methods == nullptr)) {
-    methods = AllocArray<MethodTypeDexCacheType, kDexCacheMethodTypeCacheSize>(
-        ResolvedMethodTypesOffset(),
-        NumResolvedMethodTypesOffset(),
-        GetDexFile()->NumProtoIds(),
-        LinearAllocKind::kDexCacheArray);
+    methods = AllocateResolvedMethodTypes();
   }
-  methods[MethodTypeSlotIndex(proto_idx)].store(
-      MethodTypeDexCachePair(resolved, proto_idx.index_), std::memory_order_relaxed);
+  methods->Set(proto_idx.index_, resolved);
   Runtime* const runtime = Runtime::Current();
   if (UNLIKELY(runtime->IsActiveTransaction())) {
     DCHECK(runtime->IsAotCompiler());
@@ -264,14 +211,11 @@
 
 inline void DexCache::ClearMethodType(dex::ProtoIndex proto_idx) {
   DCHECK(Runtime::Current()->IsAotCompiler());
-  uint32_t slot_idx = MethodTypeSlotIndex(proto_idx);
-  MethodTypeDexCacheType* slot = &GetResolvedMethodTypes()[slot_idx];
-  // This is racy but should only be called from the transactional interpreter.
-  if (slot->load(std::memory_order_relaxed).index == proto_idx.index_) {
-    MethodTypeDexCachePair cleared(nullptr,
-                                   MethodTypeDexCachePair::InvalidIndexForSlot(proto_idx.index_));
-    slot->store(cleared, std::memory_order_relaxed);
+  auto* methods = GetResolvedMethodTypes();
+  if (methods == nullptr) {
+    return;
   }
+  methods->Clear(proto_idx.index_);
 }
 
 inline CallSite* DexCache::GetResolvedCallSite(uint32_t call_site_idx) {
@@ -296,11 +240,7 @@
   GcRoot<mirror::CallSite> candidate(call_site);
   GcRoot<CallSite>* call_sites = GetResolvedCallSites();
   if (UNLIKELY(call_sites == nullptr)) {
-    call_sites = AllocArray<GcRoot<CallSite>, std::numeric_limits<size_t>::max()>(
-        ResolvedCallSitesOffset(),
-        NumResolvedCallSitesOffset(),
-        GetDexFile()->NumCallSiteIds(),
-        LinearAllocKind::kGCRootArray);
+    call_sites = AllocateResolvedCallSites();
   }
   GcRoot<mirror::CallSite>& target = call_sites[call_site_idx];
 
@@ -316,103 +256,60 @@
   }
 }
 
-inline uint32_t DexCache::FieldSlotIndex(uint32_t field_idx) {
-  DCHECK_LT(field_idx, GetDexFile()->NumFieldIds());
-  const uint32_t slot_idx = field_idx % kDexCacheFieldCacheSize;
-  DCHECK_LT(slot_idx, NumResolvedFields());
-  return slot_idx;
-}
-
 inline ArtField* DexCache::GetResolvedField(uint32_t field_idx) {
-  FieldDexCacheType* fields = GetResolvedFields();
+  auto* fields = GetResolvedFields();
   if (UNLIKELY(fields == nullptr)) {
     return nullptr;
   }
-  auto pair = GetNativePair(fields, FieldSlotIndex(field_idx));
-  return pair.GetObjectForIndex(field_idx);
+  return fields->Get(field_idx);
 }
 
 inline void DexCache::SetResolvedField(uint32_t field_idx, ArtField* field) {
   DCHECK(field != nullptr);
-  FieldDexCachePair pair(field, field_idx);
-  FieldDexCacheType* fields = GetResolvedFields();
+  auto* fields = GetResolvedFields();
   if (UNLIKELY(fields == nullptr)) {
-    fields = AllocArray<FieldDexCacheType, kDexCacheFieldCacheSize>(
-        ResolvedFieldsOffset(),
-        NumResolvedFieldsOffset(),
-        GetDexFile()->NumFieldIds(),
-        LinearAllocKind::kNoGCRoots);
+    fields = AllocateResolvedFields();
   }
-  SetNativePair(fields, FieldSlotIndex(field_idx), pair);
-}
-
-inline uint32_t DexCache::MethodSlotIndex(uint32_t method_idx) {
-  DCHECK_LT(method_idx, GetDexFile()->NumMethodIds());
-  const uint32_t slot_idx = method_idx % kDexCacheMethodCacheSize;
-  DCHECK_LT(slot_idx, NumResolvedMethods());
-  return slot_idx;
+  fields->Set(field_idx, field);
 }
 
 inline ArtMethod* DexCache::GetResolvedMethod(uint32_t method_idx) {
-  MethodDexCacheType* methods = GetResolvedMethods();
+  auto* methods = GetResolvedMethods();
   if (UNLIKELY(methods == nullptr)) {
     return nullptr;
   }
-  auto pair = GetNativePair(methods, MethodSlotIndex(method_idx));
-  return pair.GetObjectForIndex(method_idx);
+  return methods->Get(method_idx);
 }
 
 inline void DexCache::SetResolvedMethod(uint32_t method_idx, ArtMethod* method) {
   DCHECK(method != nullptr);
-  MethodDexCachePair pair(method, method_idx);
-  MethodDexCacheType* methods = GetResolvedMethods();
+  auto* methods = GetResolvedMethods();
   if (UNLIKELY(methods == nullptr)) {
-    methods = AllocArray<MethodDexCacheType, kDexCacheMethodCacheSize>(
-        ResolvedMethodsOffset(),
-        NumResolvedMethodsOffset(),
-        GetDexFile()->NumMethodIds(),
-        LinearAllocKind::kNoGCRoots);
+    methods = AllocateResolvedMethods();
   }
-  SetNativePair(methods, MethodSlotIndex(method_idx), pair);
+  methods->Set(method_idx, method);
 }
 
-template <typename T>
-NativeDexCachePair<T> DexCache::GetNativePair(std::atomic<NativeDexCachePair<T>>* pair_array,
-                                              size_t idx) {
-  auto* array = reinterpret_cast<std::atomic<AtomicPair<uintptr_t>>*>(pair_array);
-  AtomicPair<uintptr_t> value = AtomicPairLoadAcquire(&array[idx]);
-  return NativeDexCachePair<T>(reinterpret_cast<T*>(value.first), value.second);
-}
-
-template <typename T>
-void DexCache::SetNativePair(std::atomic<NativeDexCachePair<T>>* pair_array,
-                             size_t idx,
-                             NativeDexCachePair<T> pair) {
-  auto* array = reinterpret_cast<std::atomic<AtomicPair<uintptr_t>>*>(pair_array);
-  AtomicPair<uintptr_t> v(reinterpret_cast<size_t>(pair.object), pair.index);
-  AtomicPairStoreRelease(&array[idx], v);
-}
-
-template <typename T,
-          ReadBarrierOption kReadBarrierOption,
-          typename Visitor>
-inline void VisitDexCachePairs(std::atomic<DexCachePair<T>>* pairs,
+template <ReadBarrierOption kReadBarrierOption,
+          typename Visitor,
+          typename T>
+inline void VisitDexCachePairs(T* array,
                                size_t num_pairs,
                                const Visitor& visitor)
     REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
   // Check both the data pointer and count since the array might be initialized
   // concurrently on other thread, and we might observe just one of the values.
-  for (size_t i = 0; pairs != nullptr && i < num_pairs; ++i) {
-    DexCachePair<T> source = pairs[i].load(std::memory_order_relaxed);
+  for (size_t i = 0; array != nullptr && i < num_pairs; ++i) {
+    auto source = array->GetPair(i);
     // NOTE: We need the "template" keyword here to avoid a compilation
     // failure. GcRoot<T> is a template argument-dependent type and we need to
     // tell the compiler to treat "Read" as a template rather than a field or
     // function. Otherwise, on encountering the "<" token, the compiler would
     // treat "Read" as a field.
-    T* const before = source.object.template Read<kReadBarrierOption>();
+    auto const before = source.object.template Read<kReadBarrierOption>();
     visitor.VisitRootIfNonNull(source.object.AddressWithoutBarrier());
     if (source.object.template Read<kReadBarrierOption>() != before) {
-      pairs[i].store(source, std::memory_order_relaxed);
+      array->SetPair(i, source);
     }
   }
 }
@@ -443,13 +340,13 @@
           ReadBarrierOption kReadBarrierOption,
           typename Visitor>
 inline void DexCache::VisitNativeRoots(const Visitor& visitor) {
-  VisitDexCachePairs<String, kReadBarrierOption, Visitor>(
+  VisitDexCachePairs<kReadBarrierOption, Visitor>(
       GetStrings<kVerifyFlags>(), NumStrings<kVerifyFlags>(), visitor);
 
-  VisitDexCachePairs<Class, kReadBarrierOption, Visitor>(
+  VisitDexCachePairs<kReadBarrierOption, Visitor>(
       GetResolvedTypes<kVerifyFlags>(), NumResolvedTypes<kVerifyFlags>(), visitor);
 
-  VisitDexCachePairs<MethodType, kReadBarrierOption, Visitor>(
+  VisitDexCachePairs<kReadBarrierOption, Visitor>(
       GetResolvedMethodTypes<kVerifyFlags>(), NumResolvedMethodTypes<kVerifyFlags>(), visitor);
 
   GcRoot<mirror::CallSite>* resolved_call_sites = GetResolvedCallSites<kVerifyFlags>();
diff --git a/runtime/mirror/dex_cache.cc b/runtime/mirror/dex_cache.cc
index b7f8ee7..1d9fecd 100644
--- a/runtime/mirror/dex_cache.cc
+++ b/runtime/mirror/dex_cache.cc
@@ -52,45 +52,47 @@
 
 void DexCache::VisitReflectiveTargets(ReflectiveValueVisitor* visitor) {
   bool wrote = false;
-  FieldDexCacheType* fields = GetResolvedFields();
+  auto* fields = GetResolvedFields();
   size_t num_fields = NumResolvedFields();
   // Check both the data pointer and count since the array might be initialized
   // concurrently on other thread, and we might observe just one of the values.
   for (size_t i = 0; fields != nullptr && i < num_fields; i++) {
-    auto pair(GetNativePair(fields, i));
-    if (pair.index == FieldDexCachePair::InvalidIndexForSlot(i)) {
+    auto pair(fields->GetNativePair(i));
+    if (pair.index == NativeDexCachePair<ArtField>::InvalidIndexForSlot(i)) {
       continue;
     }
     ArtField* new_val = visitor->VisitField(
         pair.object, DexCacheSourceInfo(kSourceDexCacheResolvedField, pair.index, this));
     if (UNLIKELY(new_val != pair.object)) {
       if (new_val == nullptr) {
-        pair = FieldDexCachePair(nullptr, FieldDexCachePair::InvalidIndexForSlot(i));
+        pair = NativeDexCachePair<ArtField>(
+            nullptr, NativeDexCachePair<ArtField>::InvalidIndexForSlot(i));
       } else {
         pair.object = new_val;
       }
-      SetNativePair(fields, i, pair);
+      fields->SetNativePair(i, pair);
       wrote = true;
     }
   }
-  MethodDexCacheType* methods = GetResolvedMethods();
+  auto* methods = GetResolvedMethods();
   size_t num_methods = NumResolvedMethods();
   // Check both the data pointer and count since the array might be initialized
   // concurrently on other thread, and we might observe just one of the values.
   for (size_t i = 0; methods != nullptr && i < num_methods; i++) {
-    auto pair(GetNativePair(methods, i));
-    if (pair.index == MethodDexCachePair::InvalidIndexForSlot(i)) {
+    auto pair(methods->GetNativePair(i));
+    if (pair.index == NativeDexCachePair<ArtMethod>::InvalidIndexForSlot(i)) {
       continue;
     }
     ArtMethod* new_val = visitor->VisitMethod(
         pair.object, DexCacheSourceInfo(kSourceDexCacheResolvedMethod, pair.index, this));
     if (UNLIKELY(new_val != pair.object)) {
       if (new_val == nullptr) {
-        pair = MethodDexCachePair(nullptr, MethodDexCachePair::InvalidIndexForSlot(i));
+        pair = NativeDexCachePair<ArtMethod>(
+            nullptr, NativeDexCachePair<ArtMethod>::InvalidIndexForSlot(i));
       } else {
         pair.object = new_val;
       }
-      SetNativePair(methods, i, pair);
+      methods->SetNativePair(i, pair);
       wrote = true;
     }
   }
@@ -106,12 +108,6 @@
   SetResolvedFields(nullptr);
   SetResolvedMethodTypes(nullptr);
   SetResolvedCallSites(nullptr);
-  SetField32<false>(NumStringsOffset(), 0);
-  SetField32<false>(NumResolvedTypesOffset(), 0);
-  SetField32<false>(NumResolvedMethodsOffset(), 0);
-  SetField32<false>(NumResolvedFieldsOffset(), 0);
-  SetField32<false>(NumResolvedMethodTypesOffset(), 0);
-  SetField32<false>(NumResolvedCallSitesOffset(), 0);
 }
 
 void DexCache::SetLocation(ObjPtr<mirror::String> location) {
diff --git a/runtime/mirror/dex_cache.h b/runtime/mirror/dex_cache.h
index 7c7b11f..7af0fa2 100644
--- a/runtime/mirror/dex_cache.h
+++ b/runtime/mirror/dex_cache.h
@@ -19,15 +19,17 @@
 
 #include "array.h"
 #include "base/array_ref.h"
+#include "base/atomic_pair.h"
 #include "base/bit_utils.h"
 #include "base/locks.h"
+#include "dex/dex_file.h"
 #include "dex/dex_file_types.h"
 #include "gc_root.h"  // Note: must not use -inl here to avoid circular dependency.
+#include "linear_alloc.h"
 #include "object.h"
 #include "object_array.h"
 
 namespace art {
-enum class LinearAllocKind : uint32_t;
 
 namespace linker {
 class ImageWriter;
@@ -46,6 +48,7 @@
 class CallSite;
 class Class;
 class ClassLoader;
+class DexCache;
 class MethodType;
 class String;
 
@@ -115,20 +118,92 @@
   }
 };
 
-using TypeDexCachePair = DexCachePair<Class>;
-using TypeDexCacheType = std::atomic<TypeDexCachePair>;
+template <typename T, size_t size> class NativeDexCachePairArray {
+ public:
+  NativeDexCachePairArray() {}
 
-using StringDexCachePair = DexCachePair<String>;
-using StringDexCacheType = std::atomic<StringDexCachePair>;
+  T* Get(uint32_t index) REQUIRES_SHARED(Locks::mutator_lock_) {
+    auto pair = GetNativePair(entries_, SlotIndex(index));
+    return pair.GetObjectForIndex(index);
+  }
 
-using FieldDexCachePair = NativeDexCachePair<ArtField>;
-using FieldDexCacheType = std::atomic<FieldDexCachePair>;
+  void Set(uint32_t index, T* value) {
+    NativeDexCachePair<T> pair(value, index);
+    SetNativePair(entries_, SlotIndex(index), pair);
+  }
 
-using MethodDexCachePair = NativeDexCachePair<ArtMethod>;
-using MethodDexCacheType = std::atomic<MethodDexCachePair>;
+  NativeDexCachePair<T> GetNativePair(uint32_t index) REQUIRES_SHARED(Locks::mutator_lock_) {
+    return GetNativePair(entries_, SlotIndex(index));
+  }
 
-using MethodTypeDexCachePair = DexCachePair<MethodType>;
-using MethodTypeDexCacheType = std::atomic<MethodTypeDexCachePair>;
+  void SetNativePair(uint32_t index, NativeDexCachePair<T> value) {
+    SetNativePair(entries_, SlotIndex(index), value);
+  }
+
+ private:
+  NativeDexCachePair<T> GetNativePair(std::atomic<NativeDexCachePair<T>>* pair_array, size_t idx) {
+    auto* array = reinterpret_cast<std::atomic<AtomicPair<uintptr_t>>*>(pair_array);
+    AtomicPair<uintptr_t> value = AtomicPairLoadAcquire(&array[idx]);
+    return NativeDexCachePair<T>(reinterpret_cast<T*>(value.first), value.second);
+  }
+
+  void SetNativePair(std::atomic<NativeDexCachePair<T>>* pair_array,
+                     size_t idx,
+                     NativeDexCachePair<T> pair) {
+    auto* array = reinterpret_cast<std::atomic<AtomicPair<uintptr_t>>*>(pair_array);
+    AtomicPair<uintptr_t> v(reinterpret_cast<size_t>(pair.object), pair.index);
+    AtomicPairStoreRelease(&array[idx], v);
+  }
+
+  uint32_t SlotIndex(uint32_t index) {
+    return index % size;
+  }
+
+  std::atomic<NativeDexCachePair<T>> entries_[0];
+
+  NativeDexCachePairArray(const NativeDexCachePairArray<T, size>&) = delete;
+  NativeDexCachePairArray& operator=(const NativeDexCachePairArray<T, size>&) = delete;
+};
+
+template <typename T, size_t size> class DexCachePairArray {
+ public:
+  DexCachePairArray() {}
+
+  T* Get(uint32_t index) REQUIRES_SHARED(Locks::mutator_lock_) {
+    return GetPair(index).GetObjectForIndex(index);
+  }
+
+  void Set(uint32_t index, T* value) {
+    SetPair(index, DexCachePair<T>(value, index));
+  }
+
+  DexCachePair<T> GetPair(uint32_t index) {
+    return entries_[SlotIndex(index)].load(std::memory_order_relaxed);
+  }
+
+  void SetPair(uint32_t index, DexCachePair<T> value) {
+    entries_[SlotIndex(index)].store(value, std::memory_order_relaxed);
+  }
+
+  void Clear(uint32_t index) {
+    uint32_t slot = SlotIndex(index);
+    // This is racy but should only be called from the transactional interpreter.
+    if (entries_[slot].load(std::memory_order_relaxed).index == index) {
+      DexCachePair<T> cleared(nullptr, DexCachePair<T>::InvalidIndexForSlot(slot));
+      entries_[slot].store(cleared, std::memory_order_relaxed);
+    }
+  }
+
+ private:
+  uint32_t SlotIndex(uint32_t index) {
+    return index % size;
+  }
+
+  std::atomic<DexCachePair<T>> entries_[0];
+
+  DexCachePairArray(const DexCachePairArray<T, size>&) = delete;
+  DexCachePairArray& operator=(const DexCachePairArray<T, size>&) = delete;
+};
 
 // C++ mirror of java.lang.DexCache.
 class MANAGED DexCache final : public Object {
@@ -164,26 +239,6 @@
   static_assert(IsPowerOfTwo(kDexCacheMethodTypeCacheSize),
                 "MethodType dex cache size is not a power of 2.");
 
-  static constexpr size_t StaticTypeSize() {
-    return kDexCacheTypeCacheSize;
-  }
-
-  static constexpr size_t StaticStringSize() {
-    return kDexCacheStringCacheSize;
-  }
-
-  static constexpr size_t StaticArtFieldSize() {
-    return kDexCacheFieldCacheSize;
-  }
-
-  static constexpr size_t StaticMethodSize() {
-    return kDexCacheMethodCacheSize;
-  }
-
-  static constexpr size_t StaticMethodTypeSize() {
-    return kDexCacheMethodTypeCacheSize;
-  }
-
   // Size of an instance of java.lang.DexCache not including referenced values.
   static constexpr uint32_t InstanceSize() {
     return sizeof(DexCache);
@@ -196,7 +251,6 @@
                                      DexCachePair<Object>* pairs_end)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-
   void Initialize(const DexFile* dex_file, ObjPtr<ClassLoader> class_loader)
       REQUIRES_SHARED(Locks::mutator_lock_)
       REQUIRES(Locks::dex_lock_);
@@ -209,66 +263,6 @@
            ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
   ObjPtr<String> GetLocation() REQUIRES_SHARED(Locks::mutator_lock_);
 
-  static constexpr MemberOffset StringsOffset() {
-    return OFFSET_OF_OBJECT_MEMBER(DexCache, strings_);
-  }
-
-  static constexpr MemberOffset PreResolvedStringsOffset() {
-    return OFFSET_OF_OBJECT_MEMBER(DexCache, preresolved_strings_);
-  }
-
-  static constexpr MemberOffset ResolvedTypesOffset() {
-    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_types_);
-  }
-
-  static constexpr MemberOffset ResolvedFieldsOffset() {
-    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_fields_);
-  }
-
-  static constexpr MemberOffset ResolvedMethodsOffset() {
-    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_methods_);
-  }
-
-  static constexpr MemberOffset ResolvedMethodTypesOffset() {
-    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_method_types_);
-  }
-
-  static constexpr MemberOffset ResolvedCallSitesOffset() {
-    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_call_sites_);
-  }
-
-  static constexpr MemberOffset NumStringsOffset() {
-    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_strings_);
-  }
-
-  static constexpr MemberOffset NumPreResolvedStringsOffset() {
-    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_preresolved_strings_);
-  }
-
-  static constexpr MemberOffset NumResolvedTypesOffset() {
-    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_types_);
-  }
-
-  static constexpr MemberOffset NumResolvedFieldsOffset() {
-    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_fields_);
-  }
-
-  static constexpr MemberOffset NumResolvedMethodsOffset() {
-    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_methods_);
-  }
-
-  static constexpr MemberOffset NumResolvedMethodTypesOffset() {
-    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_method_types_);
-  }
-
-  static constexpr MemberOffset NumResolvedCallSitesOffset() {
-    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_call_sites_);
-  }
-
-  static constexpr size_t PreResolvedStringsAlignment() {
-    return alignof(GcRoot<mirror::String>);
-  }
-
   String* GetResolvedString(dex::StringIndex string_idx) ALWAYS_INLINE
       REQUIRES_SHARED(Locks::mutator_lock_);
 
@@ -317,106 +311,6 @@
   ObjPtr<CallSite> SetResolvedCallSite(uint32_t call_site_idx, ObjPtr<CallSite> resolved)
       REQUIRES_SHARED(Locks::mutator_lock_) WARN_UNUSED;
 
-  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
-  StringDexCacheType* GetStrings() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
-    return GetFieldPtr64<StringDexCacheType*, kVerifyFlags>(StringsOffset());
-  }
-
-  void SetStrings(StringDexCacheType* strings) ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
-    SetFieldPtr<false>(StringsOffset(), strings);
-  }
-
-  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
-  TypeDexCacheType* GetResolvedTypes() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
-    return GetFieldPtr<TypeDexCacheType*, kVerifyFlags>(ResolvedTypesOffset());
-  }
-
-  void SetResolvedTypes(TypeDexCacheType* resolved_types)
-      ALWAYS_INLINE
-      REQUIRES_SHARED(Locks::mutator_lock_) {
-    SetFieldPtr<false>(ResolvedTypesOffset(), resolved_types);
-  }
-
-  MethodDexCacheType* GetResolvedMethods() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
-    return GetFieldPtr<MethodDexCacheType*>(ResolvedMethodsOffset());
-  }
-
-  void SetResolvedMethods(MethodDexCacheType* resolved_methods)
-      ALWAYS_INLINE
-      REQUIRES_SHARED(Locks::mutator_lock_) {
-    SetFieldPtr<false>(ResolvedMethodsOffset(), resolved_methods);
-  }
-
-  FieldDexCacheType* GetResolvedFields() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
-    return GetFieldPtr<FieldDexCacheType*>(ResolvedFieldsOffset());
-  }
-
-  void SetResolvedFields(FieldDexCacheType* resolved_fields)
-      ALWAYS_INLINE
-      REQUIRES_SHARED(Locks::mutator_lock_) {
-    SetFieldPtr<false>(ResolvedFieldsOffset(), resolved_fields);
-  }
-
-  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
-  MethodTypeDexCacheType* GetResolvedMethodTypes()
-      ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
-    return GetFieldPtr64<MethodTypeDexCacheType*, kVerifyFlags>(ResolvedMethodTypesOffset());
-  }
-
-  void SetResolvedMethodTypes(MethodTypeDexCacheType* resolved_method_types)
-      ALWAYS_INLINE
-      REQUIRES_SHARED(Locks::mutator_lock_) {
-    SetFieldPtr<false>(ResolvedMethodTypesOffset(), resolved_method_types);
-  }
-
-  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
-  GcRoot<CallSite>* GetResolvedCallSites()
-      ALWAYS_INLINE
-      REQUIRES_SHARED(Locks::mutator_lock_) {
-    return GetFieldPtr<GcRoot<CallSite>*, kVerifyFlags>(ResolvedCallSitesOffset());
-  }
-
-  void SetResolvedCallSites(GcRoot<CallSite>* resolved_call_sites)
-      ALWAYS_INLINE
-      REQUIRES_SHARED(Locks::mutator_lock_) {
-    SetFieldPtr<false>(ResolvedCallSitesOffset(), resolved_call_sites);
-  }
-
-  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
-  size_t NumStrings() REQUIRES_SHARED(Locks::mutator_lock_) {
-    return GetField32<kVerifyFlags>(NumStringsOffset());
-  }
-
-  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
-  size_t NumPreResolvedStrings() REQUIRES_SHARED(Locks::mutator_lock_) {
-    return GetField32<kVerifyFlags>(NumPreResolvedStringsOffset());
-  }
-
-  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
-  size_t NumResolvedTypes() REQUIRES_SHARED(Locks::mutator_lock_) {
-    return GetField32<kVerifyFlags>(NumResolvedTypesOffset());
-  }
-
-  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
-  size_t NumResolvedMethods() REQUIRES_SHARED(Locks::mutator_lock_) {
-    return GetField32<kVerifyFlags>(NumResolvedMethodsOffset());
-  }
-
-  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
-  size_t NumResolvedFields() REQUIRES_SHARED(Locks::mutator_lock_) {
-    return GetField32<kVerifyFlags>(NumResolvedFieldsOffset());
-  }
-
-  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
-  size_t NumResolvedMethodTypes() REQUIRES_SHARED(Locks::mutator_lock_) {
-    return GetField32<kVerifyFlags>(NumResolvedMethodTypesOffset());
-  }
-
-  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
-  size_t NumResolvedCallSites() REQUIRES_SHARED(Locks::mutator_lock_) {
-    return GetField32<kVerifyFlags>(NumResolvedCallSitesOffset());
-  }
-
   const DexFile* GetDexFile() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
     return GetFieldPtr<const DexFile*>(OFFSET_OF_OBJECT_MEMBER(DexCache, dex_file_));
   }
@@ -427,25 +321,6 @@
 
   void SetLocation(ObjPtr<String> location) REQUIRES_SHARED(Locks::mutator_lock_);
 
-  template <typename T>
-  static NativeDexCachePair<T> GetNativePair(std::atomic<NativeDexCachePair<T>>* pair_array,
-                                             size_t idx);
-
-  template <typename T>
-  static void SetNativePair(std::atomic<NativeDexCachePair<T>>* pair_array,
-                            size_t idx,
-                            NativeDexCachePair<T> pair);
-
-  static size_t PreResolvedStringsSize(size_t num_strings) {
-    return sizeof(GcRoot<mirror::String>) * num_strings;
-  }
-
-  uint32_t StringSlotIndex(dex::StringIndex string_idx) REQUIRES_SHARED(Locks::mutator_lock_);
-  uint32_t TypeSlotIndex(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);
-  uint32_t FieldSlotIndex(uint32_t field_idx) REQUIRES_SHARED(Locks::mutator_lock_);
-  uint32_t MethodSlotIndex(uint32_t method_idx) REQUIRES_SHARED(Locks::mutator_lock_);
-  uint32_t MethodTypeSlotIndex(dex::ProtoIndex proto_idx) REQUIRES_SHARED(Locks::mutator_lock_);
-
   void VisitReflectiveTargets(ReflectiveValueVisitor* visitor) REQUIRES(Locks::mutator_lock_);
 
   void SetClassLoader(ObjPtr<ClassLoader> class_loader) REQUIRES_SHARED(Locks::mutator_lock_);
@@ -458,10 +333,108 @@
   void VisitNativeRoots(const Visitor& visitor)
       REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_);
 
+  // NOLINTBEGIN(bugprone-macro-parentheses)
+#define DEFINE_PAIR_ARRAY(name, pair_kind, getter_setter, type, size, ids, alloc_kind) \
+  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags> \
+  pair_kind ##Array<type, size>* Get ##getter_setter() \
+      ALWAYS_INLINE \
+      REQUIRES_SHARED(Locks::mutator_lock_) { \
+    return GetFieldPtr<pair_kind ##Array<type, size>*, kVerifyFlags>(getter_setter ##Offset()); \
+  } \
+  void Set ##getter_setter(pair_kind ##Array<type, size>* value) \
+      REQUIRES_SHARED(Locks::mutator_lock_) { \
+    SetFieldPtr<false>(getter_setter ##Offset(), value); \
+  } \
+  static constexpr MemberOffset getter_setter ##Offset() { \
+    return OFFSET_OF_OBJECT_MEMBER(DexCache, name); \
+  } \
+  pair_kind ##Array<type, size>* Allocate ##getter_setter() \
+      REQUIRES_SHARED(Locks::mutator_lock_) { \
+    return reinterpret_cast<pair_kind ##Array<type, size>*>( \
+        AllocArray<std::atomic<pair_kind<type>>, size>( \
+            getter_setter ##Offset(), GetDexFile()->ids(), alloc_kind)); \
+  } \
+  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags> \
+  size_t Num ##getter_setter() REQUIRES_SHARED(Locks::mutator_lock_) { \
+    return Get ##getter_setter() == nullptr ? 0u : std::min<size_t>(GetDexFile()->ids(), size); \
+  } \
+
+#define DEFINE_ARRAY(name, getter_setter, type, ids, alloc_kind) \
+  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags> \
+  type* Get ##getter_setter() \
+      ALWAYS_INLINE \
+      REQUIRES_SHARED(Locks::mutator_lock_) { \
+    return GetFieldPtr<type*, kVerifyFlags>(getter_setter ##Offset()); \
+  } \
+  void Set ##getter_setter(type* value) \
+      REQUIRES_SHARED(Locks::mutator_lock_) { \
+    SetFieldPtr<false>(getter_setter ##Offset(), value); \
+  } \
+  static constexpr MemberOffset getter_setter ##Offset() { \
+    return OFFSET_OF_OBJECT_MEMBER(DexCache, name); \
+  } \
+  type* Allocate ##getter_setter() \
+      REQUIRES_SHARED(Locks::mutator_lock_) { \
+    return reinterpret_cast<type*>(AllocArray<type, std::numeric_limits<size_t>::max()>( \
+        getter_setter ##Offset(), GetDexFile()->ids(), alloc_kind)); \
+  } \
+  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags> \
+  size_t Num ##getter_setter() REQUIRES_SHARED(Locks::mutator_lock_) { \
+    return GetDexFile()->ids(); \
+  } \
+
+  DEFINE_ARRAY(resolved_call_sites_,
+               ResolvedCallSites,
+               GcRoot<CallSite>,
+               NumCallSiteIds,
+               LinearAllocKind::kGCRootArray)
+
+  DEFINE_PAIR_ARRAY(resolved_fields_,
+                    NativeDexCachePair,
+                    ResolvedFields,
+                    ArtField,
+                    kDexCacheFieldCacheSize,
+                    NumFieldIds,
+                    LinearAllocKind::kNoGCRoots)
+
+  DEFINE_PAIR_ARRAY(resolved_method_types_,
+                    DexCachePair,
+                    ResolvedMethodTypes,
+                    mirror::MethodType,
+                    kDexCacheMethodTypeCacheSize,
+                    NumProtoIds,
+                    LinearAllocKind::kDexCacheArray)
+
+  DEFINE_PAIR_ARRAY(resolved_methods_,
+                    NativeDexCachePair,
+                    ResolvedMethods,
+                    ArtMethod,
+                    kDexCacheMethodCacheSize,
+                    NumMethodIds,
+                    LinearAllocKind::kNoGCRoots)
+
+  DEFINE_PAIR_ARRAY(resolved_types_,
+                    DexCachePair,
+                    ResolvedTypes,
+                    mirror::Class,
+                    kDexCacheTypeCacheSize,
+                    NumTypeIds,
+                    LinearAllocKind::kDexCacheArray)
+
+  DEFINE_PAIR_ARRAY(strings_,
+                    DexCachePair,
+                    Strings,
+                    mirror::String,
+                    kDexCacheStringCacheSize,
+                    NumStringIds,
+                    LinearAllocKind::kDexCacheArray)
+
+// NOLINTEND(bugprone-macro-parentheses)
+
  private:
   // Allocate new array in linear alloc and save it in the given fields.
   template<typename T, size_t kMaxCacheSize>
-  T* AllocArray(MemberOffset obj_offset, MemberOffset num_offset, size_t num, LinearAllocKind kind)
+  T* AllocArray(MemberOffset obj_offset, size_t num, LinearAllocKind kind)
      REQUIRES_SHARED(Locks::mutator_lock_);
 
   // Visit instance fields of the dex cache as well as its associated arrays.
@@ -476,26 +449,13 @@
   HeapReference<String> location_;
 
   uint64_t dex_file_;                // const DexFile*
-  uint64_t preresolved_strings_;     // GcRoot<mirror::String*> array with num_preresolved_strings
-                                     // elements.
-  uint64_t resolved_call_sites_;     // GcRoot<CallSite>* array with num_resolved_call_sites_
-                                     // elements.
-  uint64_t resolved_fields_;         // std::atomic<FieldDexCachePair>*, array with
-                                     // num_resolved_fields_ elements.
-  uint64_t resolved_method_types_;   // std::atomic<MethodTypeDexCachePair>* array with
-                                     // num_resolved_method_types_ elements.
-  uint64_t resolved_methods_;        // ArtMethod*, array with num_resolved_methods_ elements.
-  uint64_t resolved_types_;          // TypeDexCacheType*, array with num_resolved_types_ elements.
-  uint64_t strings_;                 // std::atomic<StringDexCachePair>*, array with num_strings_
-                                     // elements.
 
-  uint32_t num_preresolved_strings_;    // Number of elements in the preresolved_strings_ array.
-  uint32_t num_resolved_call_sites_;    // Number of elements in the call_sites_ array.
-  uint32_t num_resolved_fields_;        // Number of elements in the resolved_fields_ array.
-  uint32_t num_resolved_method_types_;  // Number of elements in the resolved_method_types_ array.
-  uint32_t num_resolved_methods_;       // Number of elements in the resolved_methods_ array.
-  uint32_t num_resolved_types_;         // Number of elements in the resolved_types_ array.
-  uint32_t num_strings_;                // Number of elements in the strings_ array.
+  uint64_t resolved_call_sites_;     // Array of GcRoot<CallSite> entries.
+  uint64_t resolved_fields_;         // NativeDexCachePairArray holding ArtField entries.
+  uint64_t resolved_method_types_;   // DexCachePairArray holding mirror::MethodType entries.
+  uint64_t resolved_methods_;        // NativeDexCachePairArray holding ArtMethod entries.
+  uint64_t resolved_types_;          // DexCachePairArray holding mirror::Class entries.
+  uint64_t strings_;                 // DexCachePairArray holding mirror::String entries.
 
   friend struct art::DexCacheOffsets;  // for verifying offset information
   friend class linker::ImageWriter;
diff --git a/runtime/mirror/dex_cache_test.cc b/runtime/mirror/dex_cache_test.cc
index a0e8fda..bc695fa 100644
--- a/runtime/mirror/dex_cache_test.cc
+++ b/runtime/mirror/dex_cache_test.cc
@@ -147,10 +147,10 @@
   // methods. It must therefore contain precisely two method IDs.
   ASSERT_EQ(2u, dex_file.NumProtoIds());
   ASSERT_EQ(dex_file.NumProtoIds(), dex_cache->NumResolvedMethodTypes());
-  MethodTypeDexCacheType* method_types_cache = dex_cache->GetResolvedMethodTypes();
+  auto* method_types_cache = dex_cache->GetResolvedMethodTypes();
 
   for (size_t i = 0; i < dex_file.NumProtoIds(); ++i) {
-    const MethodTypeDexCachePair pair = method_types_cache[i].load(std::memory_order_relaxed);
+    const DexCachePair<MethodType> pair = method_types_cache->GetPair(i);
     if (dex::ProtoIndex(pair.index) == method1_id.proto_idx_) {
       ASSERT_EQ(method1_type.Get(), pair.object.Read());
     } else if (dex::ProtoIndex(pair.index) == method2_id.proto_idx_) {
diff --git a/test/497-inlining-and-class-loader/clear_dex_cache.cc b/test/497-inlining-and-class-loader/clear_dex_cache.cc
index 36ec4eb..ddbcef5 100644
--- a/test/497-inlining-and-class-loader/clear_dex_cache.cc
+++ b/test/497-inlining-and-class-loader/clear_dex_cache.cc
@@ -34,7 +34,7 @@
   ScopedObjectAccess soa(Thread::Current());
   ObjPtr<mirror::DexCache> dex_cache = soa.Decode<mirror::Class>(cls)->GetDexCache();
   size_t num_methods = dex_cache->NumResolvedMethods();
-  mirror::MethodDexCacheType* methods = dex_cache->GetResolvedMethods();
+  auto* methods = dex_cache->GetResolvedMethods();
   CHECK_EQ(num_methods != 0u, methods != nullptr);
   if (num_methods == 0u) {
     return nullptr;
@@ -48,7 +48,7 @@
   CHECK(array != nullptr);
   ObjPtr<mirror::Array> decoded_array = soa.Decode<mirror::Array>(array);
   for (size_t i = 0; i != num_methods; ++i) {
-    auto pair = mirror::DexCache::GetNativePair(methods, i);
+    auto pair = methods->GetNativePair(i);
     uint32_t index = pair.index;
     ArtMethod* method = pair.object;
     if (sizeof(void*) == 4) {
@@ -69,7 +69,7 @@
   ScopedObjectAccess soa(Thread::Current());
   ObjPtr<mirror::DexCache> dex_cache = soa.Decode<mirror::Class>(cls)->GetDexCache();
   size_t num_methods = dex_cache->NumResolvedMethods();
-  mirror::MethodDexCacheType* methods = dex_cache->GetResolvedMethods();
+  auto* methods = dex_cache->GetResolvedMethods();
   CHECK_EQ(num_methods != 0u, methods != nullptr);
   ObjPtr<mirror::Array> old = soa.Decode<mirror::Array>(old_cache);
   CHECK_EQ(methods != nullptr, old != nullptr);
@@ -86,8 +86,8 @@
       index = dchecked_integral_cast<uint32_t>(long_array->Get(2u * i));
       method = reinterpret_cast64<ArtMethod*>(long_array->Get(2u * i + 1u));
     }
-    mirror::MethodDexCachePair pair(method, index);
-    mirror::DexCache::SetNativePair(methods, i, pair);
+    mirror::NativeDexCachePair<ArtMethod> pair(method, index);
+    methods->SetNativePair(i, pair);
   }
 }
 
diff --git a/test/626-const-class-linking/clear_dex_cache_types.cc b/test/626-const-class-linking/clear_dex_cache_types.cc
index 1aa3cce..ef230ad 100644
--- a/test/626-const-class-linking/clear_dex_cache_types.cc
+++ b/test/626-const-class-linking/clear_dex_cache_types.cc
@@ -28,8 +28,7 @@
   ScopedObjectAccess soa(Thread::Current());
   ObjPtr<mirror::DexCache> dex_cache = soa.Decode<mirror::Class>(cls)->GetDexCache();
   for (size_t i = 0, num_types = dex_cache->NumResolvedTypes(); i != num_types; ++i) {
-    mirror::TypeDexCachePair cleared(nullptr, mirror::TypeDexCachePair::InvalidIndexForSlot(i));
-    dex_cache->GetResolvedTypes()[i].store(cleared, std::memory_order_relaxed);
+    dex_cache->GetResolvedTypes()->Clear(i);
   }
 }