Remove DexCache arrays from image.

Remove the hashtable storage from the image and allocate it at
runtime instead (but keep the DexCache object in the image).

For compiled code, we have largely moved to using .bss, so the
DexCache just costs us unnecessary extra space and dirty pages.

For interpreted code, the hashtables are too small and will be
overwritten many times over at run-time regardless.

The next step will be to make DexCache variable-size so it can
adapt to both of the extremes (taking minimal amount of memory
for compiled code and avoiding cache evictions in interpreter).

Test: test.py --host
Change-Id: I9f89e8f19829b812cf85dea1a964259ed8b87f4d
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index b03755c..ee64eda 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -143,7 +143,6 @@
 #include "thread_list.h"
 #include "trace.h"
 #include "transaction.h"
-#include "utils/dex_cache_arrays_layout-inl.h"
 #include "verifier/class_verifier.h"
 #include "well_known_classes.h"
 
@@ -1518,7 +1517,6 @@
 template <typename Visitor>
 static void VisitInternedStringReferences(
     gc::space::ImageSpace* space,
-    bool use_preresolved_strings,
     const Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
   const uint8_t* target_base = space->Begin();
   const ImageSection& sro_section =
@@ -1535,75 +1533,26 @@
   for (size_t offset_index = 0; offset_index < num_string_offsets; ++offset_index) {
     uint32_t base_offset = sro_base[offset_index].first;
 
-    if (HasDexCacheStringNativeRefTag(base_offset)) {
-      base_offset = ClearDexCacheNativeRefTags(base_offset);
-      DCHECK_ALIGNED(base_offset, 2);
+    uint32_t raw_member_offset = sro_base[offset_index].second;
+    DCHECK_ALIGNED(base_offset, 2);
+    DCHECK_ALIGNED(raw_member_offset, 2);
 
-      ObjPtr<mirror::DexCache> dex_cache =
-          reinterpret_cast<mirror::DexCache*>(space->Begin() + base_offset);
-      uint32_t string_slot_index = sro_base[offset_index].second;
-
-      mirror::StringDexCachePair source =
-          dex_cache->GetStrings()[string_slot_index].load(std::memory_order_relaxed);
-      ObjPtr<mirror::String> referred_string = source.object.Read();
-      DCHECK(referred_string != nullptr);
-
-      ObjPtr<mirror::String> visited = visitor(referred_string);
-      if (visited != referred_string) {
-        // Because we are not using a helper function we need to mark the GC card manually.
-        WriteBarrier::ForEveryFieldWrite(dex_cache);
-        dex_cache->GetStrings()[string_slot_index].store(
-            mirror::StringDexCachePair(visited, source.index), std::memory_order_relaxed);
-      }
-    } else if (HasDexCachePreResolvedStringNativeRefTag(base_offset)) {
-      if (use_preresolved_strings) {
-        base_offset = ClearDexCacheNativeRefTags(base_offset);
-        DCHECK_ALIGNED(base_offset, 2);
-
-        ObjPtr<mirror::DexCache> dex_cache =
-            reinterpret_cast<mirror::DexCache*>(space->Begin() + base_offset);
-        uint32_t string_index = sro_base[offset_index].second;
-
-        GcRoot<mirror::String>* preresolved_strings =
-            dex_cache->GetPreResolvedStrings();
-        // Handle calls to ClearPreResolvedStrings that might occur concurrently by the profile
-        // saver that runs shortly after startup. In case the strings are cleared, there is nothing
-        // to fix up.
-        if (preresolved_strings != nullptr) {
-          ObjPtr<mirror::String> referred_string =
-              preresolved_strings[string_index].Read();
-          if (referred_string != nullptr) {
-            ObjPtr<mirror::String> visited = visitor(referred_string);
-            if (visited != referred_string) {
-              // Because we are not using a helper function we need to mark the GC card manually.
-              WriteBarrier::ForEveryFieldWrite(dex_cache);
-              preresolved_strings[string_index] = GcRoot<mirror::String>(visited);
-            }
-          }
-        }
-      }
-    } else {
-      uint32_t raw_member_offset = sro_base[offset_index].second;
-      DCHECK_ALIGNED(base_offset, 2);
-      DCHECK_ALIGNED(raw_member_offset, 2);
-
-      ObjPtr<mirror::Object> obj_ptr =
-          reinterpret_cast<mirror::Object*>(space->Begin() + base_offset);
-      MemberOffset member_offset(raw_member_offset);
-      ObjPtr<mirror::String> referred_string =
-          obj_ptr->GetFieldObject<mirror::String,
-                                  kVerifyNone,
-                                  kWithoutReadBarrier,
-                                  /* kIsVolatile= */ false>(member_offset);
-      DCHECK(referred_string != nullptr);
-
-      ObjPtr<mirror::String> visited = visitor(referred_string);
-      if (visited != referred_string) {
-        obj_ptr->SetFieldObject</* kTransactionActive= */ false,
-                                /* kCheckTransaction= */ false,
+    ObjPtr<mirror::Object> obj_ptr =
+        reinterpret_cast<mirror::Object*>(space->Begin() + base_offset);
+    MemberOffset member_offset(raw_member_offset);
+    ObjPtr<mirror::String> referred_string =
+        obj_ptr->GetFieldObject<mirror::String,
                                 kVerifyNone,
-                                /* kIsVolatile= */ false>(member_offset, visited);
-      }
+                                kWithoutReadBarrier,
+                                /* kIsVolatile= */ false>(member_offset);
+    DCHECK(referred_string != nullptr);
+
+    ObjPtr<mirror::String> visited = visitor(referred_string);
+    if (visited != referred_string) {
+      obj_ptr->SetFieldObject</* kTransactionActive= */ false,
+                              /* kCheckTransaction= */ false,
+                              kVerifyNone,
+                              /* kIsVolatile= */ false>(member_offset, visited);
     }
   }
 }
@@ -1621,7 +1570,6 @@
   size_t num_recorded_refs = 0u;
   VisitInternedStringReferences(
       space,
-      /*use_preresolved_strings=*/ true,
       [&image_interns, &num_recorded_refs](ObjPtr<mirror::String> str)
           REQUIRES_SHARED(Locks::mutator_lock_) {
         auto it = image_interns.find(GcRoot<mirror::String>(str));
@@ -1643,8 +1591,7 @@
       ClassLinker* class_linker,
       gc::space::ImageSpace* space,
       Handle<mirror::ClassLoader> class_loader,
-      Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches,
-      ClassTable::ClassSet* new_class_set)
+      Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches)
       REQUIRES(!Locks::dex_lock_)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
@@ -1656,8 +1603,7 @@
     ClassLinker* class_linker,
     gc::space::ImageSpace* space,
     Handle<mirror::ClassLoader> class_loader,
-    Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches,
-    ClassTable::ClassSet* new_class_set)
+    Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches)
     REQUIRES(!Locks::dex_lock_)
     REQUIRES_SHARED(Locks::mutator_lock_) {
   ScopedTrace app_image_timing("AppImage:Updating");
@@ -1672,7 +1618,6 @@
   Runtime* const runtime = Runtime::Current();
   gc::Heap* const heap = runtime->GetHeap();
   const ImageHeader& header = space->GetImageHeader();
-  bool load_app_image_startup_cache = runtime->LoadAppImageStartupCache();
   {
     // Register dex caches with the class loader.
     WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
@@ -1683,56 +1628,6 @@
         CHECK(class_linker->FindDexCacheDataLocked(*dex_file) == nullptr);
         class_linker->RegisterDexFileLocked(*dex_file, dex_cache, class_loader.Get());
       }
-
-      if (!load_app_image_startup_cache) {
-        dex_cache->ClearPreResolvedStrings();
-      }
-
-      if (kIsDebugBuild) {
-        CHECK(new_class_set != nullptr);
-        mirror::TypeDexCacheType* const types = dex_cache->GetResolvedTypes();
-        const size_t num_types = dex_cache->NumResolvedTypes();
-        for (size_t j = 0; j != num_types; ++j) {
-          // The image space is not yet added to the heap, avoid read barriers.
-          ObjPtr<mirror::Class> klass = types[j].load(std::memory_order_relaxed).object.Read();
-
-          if (space->HasAddress(klass.Ptr())) {
-            DCHECK(!klass->IsErroneous()) << klass->GetStatus();
-            auto it = new_class_set->find(ClassTable::TableSlot(klass));
-            DCHECK(it != new_class_set->end());
-            DCHECK_EQ(it->Read(), klass);
-            ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
-
-            if (super_class != nullptr && !heap->ObjectIsInBootImageSpace(super_class)) {
-              auto it2 = new_class_set->find(ClassTable::TableSlot(super_class));
-              DCHECK(it2 != new_class_set->end());
-              DCHECK_EQ(it2->Read(), super_class);
-            }
-
-            for (ArtMethod& m : klass->GetDirectMethods(kRuntimePointerSize)) {
-              const void* code = m.GetEntryPointFromQuickCompiledCode();
-              const void* oat_code = m.IsInvokable() ? class_linker->GetQuickOatCodeFor(&m) : code;
-              if (!class_linker->IsQuickResolutionStub(code) &&
-                  !class_linker->IsQuickGenericJniStub(code) &&
-                  !class_linker->IsQuickToInterpreterBridge(code) &&
-                  !m.IsNative()) {
-                DCHECK_EQ(code, oat_code) << m.PrettyMethod();
-              }
-            }
-
-            for (ArtMethod& m : klass->GetVirtualMethods(kRuntimePointerSize)) {
-              const void* code = m.GetEntryPointFromQuickCompiledCode();
-              const void* oat_code = m.IsInvokable() ? class_linker->GetQuickOatCodeFor(&m) : code;
-              if (!class_linker->IsQuickResolutionStub(code) &&
-                  !class_linker->IsQuickGenericJniStub(code) &&
-                  !class_linker->IsQuickToInterpreterBridge(code) &&
-                  !m.IsNative()) {
-                DCHECK_EQ(code, oat_code) << m.PrettyMethod();
-              }
-            }
-          }
-        }
-      }
     }
   }
 
@@ -1762,8 +1657,6 @@
   Runtime* const runtime = Runtime::Current();
   InternTable* const intern_table = runtime->GetInternTable();
 
-  const bool load_startup_cache = runtime->LoadAppImageStartupCache();
-
   // Add the intern table, removing any conflicts. For conflicts, store the new address in a map
   // for faster lookup.
   // TODO: Optimize with a bitmap or bloom filter
@@ -1817,7 +1710,6 @@
     VLOG(image) << "AppImage:conflictingInternStrings = " << intern_remap.size();
     VisitInternedStringReferences(
         space,
-        load_startup_cache,
         [&intern_remap](ObjPtr<mirror::String> str) REQUIRES_SHARED(Locks::mutator_lock_) {
           auto it = intern_remap.find(str.Ptr());
           if (it != intern_remap.end()) {
@@ -1931,15 +1823,6 @@
     heap->VisitObjects(visitor);
   }
 
-  static void CheckArtMethodDexCacheArray(gc::Heap* heap,
-                                          ClassLinker* class_linker,
-                                          mirror::MethodDexCacheType* arr,
-                                          size_t size)
-      REQUIRES_SHARED(Locks::mutator_lock_) {
-    ImageChecker ic(heap, class_linker);
-    ic.CheckArtMethodDexCacheArray(arr, size);
-  }
-
  private:
   ImageChecker(gc::Heap* heap, ClassLinker* class_linker)
      :  spaces_(heap->GetBootImageSpaces()),
@@ -1992,30 +1875,6 @@
     }
   }
 
-  void CheckArtMethodDexCacheArray(mirror::MethodDexCacheType* arr, size_t size)
-      REQUIRES_SHARED(Locks::mutator_lock_) {
-    CHECK_EQ(arr != nullptr, size != 0u);
-    if (arr != nullptr) {
-      bool contains = false;
-      for (auto space : spaces_) {
-        auto offset = reinterpret_cast<uint8_t*>(arr) - space->Begin();
-        if (space->GetImageHeader().GetDexCacheArraysSection().Contains(offset)) {
-          contains = true;
-          break;
-        }
-      }
-      CHECK(contains);
-    }
-    for (size_t j = 0; j < size; ++j) {
-      auto pair = mirror::DexCache::GetNativePairPtrSize(arr, j, pointer_size_);
-      ArtMethod* method = pair.object;
-      // expected_class == null means we are a dex cache.
-      if (method != nullptr) {
-        CheckArtMethod(method, nullptr);
-      }
-    }
-  }
-
   const std::vector<gc::space::ImageSpace*>& spaces_;
   const PointerSize pointer_size_;
 
@@ -2027,8 +1886,8 @@
 
 static void VerifyAppImage(const ImageHeader& header,
                            const Handle<mirror::ClassLoader>& class_loader,
-                           const Handle<mirror::ObjectArray<mirror::DexCache> >& dex_caches,
-                           ClassTable* class_table, gc::space::ImageSpace* space)
+                           ClassTable* class_table,
+                           gc::space::ImageSpace* space)
     REQUIRES_SHARED(Locks::mutator_lock_) {
   header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
     ObjPtr<mirror::Class> klass = method.GetDeclaringClass();
@@ -2056,17 +1915,6 @@
       }
     }
   }
-  // Check that all non-primitive classes in dex caches are also in the class table.
-  for (auto dex_cache : dex_caches.ConstIterate<mirror::DexCache>()) {
-    mirror::TypeDexCacheType* const types = dex_cache->GetResolvedTypes();
-    for (int32_t j = 0, num_types = dex_cache->NumResolvedTypes(); j < num_types; j++) {
-      ObjPtr<mirror::Class> klass = types[j].load(std::memory_order_relaxed).object.Read();
-      if (klass != nullptr && !klass->IsPrimitive()) {
-        CHECK(class_table->Contains(klass))
-            << klass->PrettyDescriptor() << " " << dex_cache->GetDexFile()->GetLocation();
-      }
-    }
-  }
 }
 
 bool ClassLinker::AddImageSpace(
@@ -2138,24 +1986,15 @@
       return false;
     }
 
-    if (app_image) {
-      // The current dex file field is bogus, overwrite it so that we can get the dex file in the
-      // loop below.
-      dex_cache->SetDexFile(dex_file.get());
-      mirror::TypeDexCacheType* const types = dex_cache->GetResolvedTypes();
-      for (int32_t j = 0, num_types = dex_cache->NumResolvedTypes(); j < num_types; j++) {
-        ObjPtr<mirror::Class> klass = types[j].load(std::memory_order_relaxed).object.Read();
-        if (klass != nullptr) {
-          DCHECK(!klass->IsErroneous()) << klass->GetStatus();
-        }
-      }
-    } else {
-      if (kCheckImageObjects) {
-        ImageChecker::CheckArtMethodDexCacheArray(heap,
-                                                  this,
-                                                  dex_cache->GetResolvedMethods(),
-                                                  dex_cache->NumResolvedMethods());
-      }
+    LinearAlloc* linear_alloc = GetOrCreateAllocatorForClassLoader(class_loader.Get());
+    DCHECK(linear_alloc != nullptr);
+    DCHECK_EQ(linear_alloc == Runtime::Current()->GetLinearAlloc(), !app_image);
+    {
+      // Native fields are all null.  Initialize them and allocate native memory.
+      WriterMutexLock mu(self, *Locks::dex_lock_);
+      dex_cache->InitializeNativeFields(dex_file.get(), linear_alloc);
+    }
+    if (!app_image) {
       // Register dex files, keep track of existing ones that are conflicts.
       AppendToBootClassPath(dex_file.get(), dex_cache);
     }
@@ -2172,14 +2011,6 @@
   }
 
   if (kCheckImageObjects) {
-    for (auto dex_cache : dex_caches.Iterate<mirror::DexCache>()) {
-      for (size_t j = 0; j < dex_cache->NumResolvedFields(); ++j) {
-        auto* field = dex_cache->GetResolvedField(j, image_pointer_size_);
-        if (field != nullptr) {
-          CHECK(field->GetDeclaringClass()->GetClass() != nullptr);
-        }
-      }
-    }
     if (!app_image) {
       ImageChecker::CheckObjects(heap, this);
     }
@@ -2244,7 +2075,7 @@
     VLOG(image) << "Adding class table classes took " << PrettyDuration(NanoTime() - start_time2);
   }
   if (app_image) {
-    AppImageLoadingHelper::Update(this, space, class_loader, dex_caches, &temp_set);
+    AppImageLoadingHelper::Update(this, space, class_loader, dex_caches);
 
     {
       ScopedTrace trace("AppImage:UpdateClassLoaders");
@@ -2297,7 +2128,7 @@
     // This verification needs to happen after the classes have been added to the class loader.
     // Since it ensures classes are in the class table.
     ScopedTrace trace("AppImage:Verify");
-    VerifyAppImage(header, class_loader, dex_caches, class_table, space);
+    VerifyAppImage(header, class_loader, class_table, space);
   }
 
   VLOG(class_linker) << "Adding image space took " << PrettyDuration(NanoTime() - start_time);
@@ -2596,11 +2427,8 @@
           : ObjPtr<mirror::Array>(mirror::IntArray::Alloc(self, length)));
 }
 
-ObjPtr<mirror::DexCache> ClassLinker::AllocDexCache(/*out*/ ObjPtr<mirror::String>* out_location,
-                                                    Thread* self,
-                                                    const DexFile& dex_file) {
+ObjPtr<mirror::DexCache> ClassLinker::AllocDexCache(Thread* self, const DexFile& dex_file) {
   StackHandleScope<1> hs(self);
-  DCHECK(out_location != nullptr);
   auto dex_cache(hs.NewHandle(ObjPtr<mirror::DexCache>::DownCast(
       GetClassRoot<mirror::DexCache>(this)->AllocObject(self))));
   if (dex_cache == nullptr) {
@@ -2614,24 +2442,17 @@
     self->AssertPendingOOMException();
     return nullptr;
   }
-  *out_location = location;
+  dex_cache->SetLocation(location);
   return dex_cache.Get();
 }
 
 ObjPtr<mirror::DexCache> ClassLinker::AllocAndInitializeDexCache(Thread* self,
                                                                  const DexFile& dex_file,
                                                                  LinearAlloc* linear_alloc) {
-  ObjPtr<mirror::String> location = nullptr;
-  ObjPtr<mirror::DexCache> dex_cache = AllocDexCache(&location, self, dex_file);
+  ObjPtr<mirror::DexCache> dex_cache = AllocDexCache(self, dex_file);
   if (dex_cache != nullptr) {
     WriterMutexLock mu(self, *Locks::dex_lock_);
-    DCHECK(location != nullptr);
-    mirror::DexCache::InitializeDexCache(self,
-                                         dex_cache,
-                                         location,
-                                         &dex_file,
-                                         linear_alloc,
-                                         image_pointer_size_);
+    dex_cache->InitializeNativeFields(&dex_file, linear_alloc);
   }
   return dex_cache;
 }
@@ -4073,6 +3894,7 @@
   Thread* const self = Thread::Current();
   Locks::dex_lock_->AssertExclusiveHeld(self);
   CHECK(dex_cache != nullptr) << dex_file.GetLocation();
+  CHECK_EQ(dex_cache->GetDexFile(), &dex_file) << dex_file.GetLocation();
   // For app images, the dex cache location may be a suffix of the dex file location since the
   // dex file location is an absolute path.
   const std::string dex_cache_location = dex_cache->GetLocation()->ToModifiedUtf8();
@@ -4119,7 +3941,6 @@
   hiddenapi::InitializeDexFileDomain(dex_file, class_loader);
 
   jweak dex_cache_jweak = vm->AddWeakGlobalRef(self, dex_cache);
-  dex_cache->SetDexFile(&dex_file);
   DexCacheData data;
   data.weak_root = dex_cache_jweak;
   data.dex_file = dex_cache->GetDexFile();
@@ -4233,11 +4054,7 @@
   // get to a suspend point.
   StackHandleScope<3> hs(self);
   Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
-  ObjPtr<mirror::String> location;
-  Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(AllocDexCache(/*out*/&location,
-                                                                  self,
-                                                                  dex_file)));
-  Handle<mirror::String> h_location(hs.NewHandle(location));
+  Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(AllocDexCache(self, dex_file)));
   {
     // Avoid a deadlock between a garbage collecting thread running a checkpoint,
     // a thread holding the dex lock and blocking on a condition variable regarding
@@ -4247,15 +4064,10 @@
     const DexCacheData* old_data = FindDexCacheDataLocked(dex_file);
     old_dex_cache = DecodeDexCacheLocked(self, old_data);
     if (old_dex_cache == nullptr && h_dex_cache != nullptr) {
-      // Do InitializeDexCache while holding dex lock to make sure two threads don't call it at the
-      // same time with the same dex cache. Since the .bss is shared this can cause failing DCHECK
-      // that the arrays are null.
-      mirror::DexCache::InitializeDexCache(self,
-                                           h_dex_cache.Get(),
-                                           h_location.Get(),
-                                           &dex_file,
-                                           linear_alloc,
-                                           image_pointer_size_);
+      // Do InitializeNativeFields while holding dex lock to make sure two threads don't call it
+      // at the same time with the same dex cache. Since the .bss is shared this can cause failing
+      // DCHECK that the arrays are null.
+      h_dex_cache->InitializeNativeFields(&dex_file, linear_alloc);
       RegisterDexFileLocked(dex_file, h_dex_cache.Get(), h_class_loader.Get());
     }
     if (old_dex_cache != nullptr) {
diff --git a/runtime/class_linker.h b/runtime/class_linker.h
index 33cd2f9..df9c209 100644
--- a/runtime/class_linker.h
+++ b/runtime/class_linker.h
@@ -926,9 +926,7 @@
       REQUIRES_SHARED(Locks::mutator_lock_)
       REQUIRES(!Roles::uninterruptible_);
 
-  ObjPtr<mirror::DexCache> AllocDexCache(/*out*/ ObjPtr<mirror::String>* out_location,
-                                         Thread* self,
-                                         const DexFile& dex_file)
+  ObjPtr<mirror::DexCache> AllocDexCache(Thread* self, const DexFile& dex_file)
       REQUIRES_SHARED(Locks::mutator_lock_)
       REQUIRES(!Roles::uninterruptible_);
 
diff --git a/runtime/class_linker_test.cc b/runtime/class_linker_test.cc
index ef85191..c677601 100644
--- a/runtime/class_linker_test.cc
+++ b/runtime/class_linker_test.cc
@@ -1528,13 +1528,10 @@
     }
     ASSERT_TRUE(dex_cache != nullptr);
   }
-  // Make a copy of the dex cache and change the name.
-  dex_cache.Assign(mirror::Object::Clone(dex_cache, soa.Self())->AsDexCache());
   const uint16_t data[] = { 0x20AC, 0x20A1 };
   Handle<mirror::String> location(hs.NewHandle(mirror::String::AllocFromUtf16(soa.Self(),
                                                                               arraysize(data),
                                                                               data)));
-  dex_cache->SetLocation(location.Get());
   const DexFile* old_dex_file = dex_cache->GetDexFile();
 
   std::unique_ptr<DexFile> dex_file(new StandardDexFile(old_dex_file->Begin(),
@@ -1543,6 +1540,10 @@
                                                         0u,
                                                         nullptr,
                                                         nullptr));
+  // Make a copy of the dex cache with changed name.
+  LinearAlloc* alloc = Runtime::Current()->GetLinearAlloc();
+  dex_cache.Assign(class_linker->AllocAndInitializeDexCache(Thread::Current(), *dex_file, alloc));
+  DCHECK_EQ(dex_cache->GetLocation()->CompareTo(location.Get()), 0);
   {
     WriterMutexLock mu(soa.Self(), *Locks::dex_lock_);
     // Check that inserting with a UTF16 name works.
diff --git a/runtime/gc/space/image_space.cc b/runtime/gc/space/image_space.cc
index a76e366..99aee0b 100644
--- a/runtime/gc/space/image_space.cc
+++ b/runtime/gc/space/image_space.cc
@@ -411,35 +411,6 @@
       const {}
   void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
 
-  void VisitDexCacheArrays(ObjPtr<mirror::DexCache> dex_cache)
-      REQUIRES_SHARED(Locks::mutator_lock_) {
-    ScopedTrace st("VisitDexCacheArrays");
-    FixupDexCacheArray<mirror::StringDexCacheType>(dex_cache,
-                                                   mirror::DexCache::StringsOffset(),
-                                                   dex_cache->NumStrings<kVerifyNone>());
-    FixupDexCacheArray<mirror::TypeDexCacheType>(dex_cache,
-                                                 mirror::DexCache::ResolvedTypesOffset(),
-                                                 dex_cache->NumResolvedTypes<kVerifyNone>());
-    FixupDexCacheArray<mirror::MethodDexCacheType>(dex_cache,
-                                                   mirror::DexCache::ResolvedMethodsOffset(),
-                                                   dex_cache->NumResolvedMethods<kVerifyNone>());
-    FixupDexCacheArray<mirror::FieldDexCacheType>(dex_cache,
-                                                  mirror::DexCache::ResolvedFieldsOffset(),
-                                                  dex_cache->NumResolvedFields<kVerifyNone>());
-    FixupDexCacheArray<mirror::MethodTypeDexCacheType>(
-        dex_cache,
-        mirror::DexCache::ResolvedMethodTypesOffset(),
-        dex_cache->NumResolvedMethodTypes<kVerifyNone>());
-    FixupDexCacheArray<GcRoot<mirror::CallSite>>(
-        dex_cache,
-        mirror::DexCache::ResolvedCallSitesOffset(),
-        dex_cache->NumResolvedCallSites<kVerifyNone>());
-    FixupDexCacheArray<GcRoot<mirror::String>>(
-        dex_cache,
-        mirror::DexCache::PreResolvedStringsOffset(),
-        dex_cache->NumPreResolvedStrings<kVerifyNone>());
-  }
-
   template <bool kMayBeNull = true, typename T>
   ALWAYS_INLINE void PatchGcRoot(/*inout*/GcRoot<T>* root) const
       REQUIRES_SHARED(Locks::mutator_lock_) {
@@ -486,54 +457,6 @@
     }
   }
 
-  template <typename T>
-  void FixupDexCacheArrayEntry(std::atomic<mirror::DexCachePair<T>>* array, uint32_t index)
-      REQUIRES_SHARED(Locks::mutator_lock_) {
-    static_assert(sizeof(std::atomic<mirror::DexCachePair<T>>) == sizeof(mirror::DexCachePair<T>),
-                  "Size check for removing std::atomic<>.");
-    PatchGcRoot(&(reinterpret_cast<mirror::DexCachePair<T>*>(array)[index].object));
-  }
-
-  template <typename T>
-  void FixupDexCacheArrayEntry(std::atomic<mirror::NativeDexCachePair<T>>* array, uint32_t index)
-      REQUIRES_SHARED(Locks::mutator_lock_) {
-    static_assert(sizeof(std::atomic<mirror::NativeDexCachePair<T>>) ==
-                      sizeof(mirror::NativeDexCachePair<T>),
-                  "Size check for removing std::atomic<>.");
-    mirror::NativeDexCachePair<T> pair =
-        mirror::DexCache::GetNativePairPtrSize(array, index, kPointerSize);
-    if (pair.object != nullptr) {
-      pair.object = native_visitor_(pair.object);
-      mirror::DexCache::SetNativePairPtrSize(array, index, pair, kPointerSize);
-    }
-  }
-
-  void FixupDexCacheArrayEntry(GcRoot<mirror::CallSite>* array, uint32_t index)
-      REQUIRES_SHARED(Locks::mutator_lock_) {
-    PatchGcRoot(&array[index]);
-  }
-
-  void FixupDexCacheArrayEntry(GcRoot<mirror::String>* array, uint32_t index)
-      REQUIRES_SHARED(Locks::mutator_lock_) {
-    PatchGcRoot(&array[index]);
-  }
-
-  template <typename EntryType>
-  void FixupDexCacheArray(ObjPtr<mirror::DexCache> dex_cache,
-                          MemberOffset array_offset,
-                          uint32_t size) REQUIRES_SHARED(Locks::mutator_lock_) {
-    EntryType* old_array =
-        reinterpret_cast64<EntryType*>(dex_cache->GetField64<kVerifyNone>(array_offset));
-    DCHECK_EQ(old_array != nullptr, size != 0u);
-    if (old_array != nullptr) {
-      EntryType* new_array = native_visitor_(old_array);
-      dex_cache->SetField64<kVerifyNone>(array_offset, reinterpret_cast64<uint64_t>(new_array));
-      for (uint32_t i = 0; i != size; ++i) {
-        FixupDexCacheArrayEntry(new_array, i);
-      }
-    }
-  }
-
  private:
   // Heap objects visitor.
   HeapVisitor heap_visitor_;
@@ -1399,15 +1322,6 @@
       image_header->RelocateImageReferences(app_image_objects.Delta());
       image_header->RelocateBootImageReferences(boot_image.Delta());
       CHECK_EQ(image_header->GetImageBegin(), target_base);
-      // Fix up dex cache DexFile pointers.
-      ObjPtr<mirror::ObjectArray<mirror::DexCache>> dex_caches =
-          image_header->GetImageRoot<kWithoutReadBarrier>(ImageHeader::kDexCaches)
-              ->AsObjectArray<mirror::DexCache, kVerifyNone>();
-      for (int32_t i = 0, count = dex_caches->GetLength(); i < count; ++i) {
-        ObjPtr<mirror::DexCache> dex_cache = dex_caches->Get<kVerifyNone, kWithoutReadBarrier>(i);
-        CHECK(dex_cache != nullptr);
-        patch_object_visitor.VisitDexCacheArrays(dex_cache);
-      }
     }
     {
       // Only touches objects in the app image, no need for mutator lock.
@@ -2835,12 +2749,7 @@
           // This is the last pass over objects, so we do not need to Set().
           main_patch_object_visitor.VisitObject(object);
           ObjPtr<mirror::Class> klass = object->GetClass<kVerifyNone, kWithoutReadBarrier>();
-          if (klass->IsDexCacheClass<kVerifyNone>()) {
-            // Patch dex cache array pointers and elements.
-            ObjPtr<mirror::DexCache> dex_cache =
-                object->AsDexCache<kVerifyNone, kWithoutReadBarrier>();
-            main_patch_object_visitor.VisitDexCacheArrays(dex_cache);
-          } else if (klass == method_class || klass == constructor_class) {
+          if (klass == method_class || klass == constructor_class) {
             // Patch the ArtMethod* in the mirror::Executable subobject.
             ObjPtr<mirror::Executable> as_executable =
                 ObjPtr<mirror::Executable>::DownCast(object);
@@ -3919,39 +3828,14 @@
   }
 }
 
-void ImageSpace::DisablePreResolvedStrings() {
-  // Clear dex cache pointers.
-  ObjPtr<mirror::ObjectArray<mirror::DexCache>> dex_caches =
-      GetImageHeader().GetImageRoot(ImageHeader::kDexCaches)->AsObjectArray<mirror::DexCache>();
-  for (size_t len = dex_caches->GetLength(), i = 0; i < len; ++i) {
-    ObjPtr<mirror::DexCache> dex_cache = dex_caches->Get(i);
-    dex_cache->ClearPreResolvedStrings();
-  }
-}
-
 void ImageSpace::ReleaseMetadata() {
   const ImageSection& metadata = GetImageHeader().GetMetadataSection();
   VLOG(image) << "Releasing " << metadata.Size() << " image metadata bytes";
-  // In the case where new app images may have been added around the checkpoint, ensure that we
-  // don't madvise the cache for these.
-  ObjPtr<mirror::ObjectArray<mirror::DexCache>> dex_caches =
-      GetImageHeader().GetImageRoot(ImageHeader::kDexCaches)->AsObjectArray<mirror::DexCache>();
-  bool have_startup_cache = false;
-  for (size_t len = dex_caches->GetLength(), i = 0; i < len; ++i) {
-    ObjPtr<mirror::DexCache> dex_cache = dex_caches->Get(i);
-    if (dex_cache->NumPreResolvedStrings() != 0u) {
-      have_startup_cache = true;
-    }
-  }
-  // Only safe to do for images that have their preresolved strings caches disabled. This is because
-  // uncompressed images madvise to the original unrelocated image contents.
-  if (!have_startup_cache) {
-    // Avoid using ZeroAndReleasePages since the zero fill might not be word atomic.
-    uint8_t* const page_begin = AlignUp(Begin() + metadata.Offset(), kPageSize);
-    uint8_t* const page_end = AlignDown(Begin() + metadata.End(), kPageSize);
-    if (page_begin < page_end) {
-      CHECK_NE(madvise(page_begin, page_end - page_begin, MADV_DONTNEED), -1) << "madvise failed";
-    }
+  // Avoid using ZeroAndReleasePages since the zero fill might not be word atomic.
+  uint8_t* const page_begin = AlignUp(Begin() + metadata.Offset(), kPageSize);
+  uint8_t* const page_end = AlignDown(Begin() + metadata.End(), kPageSize);
+  if (page_begin < page_end) {
+    CHECK_NE(madvise(page_begin, page_end - page_begin, MADV_DONTNEED), -1) << "madvise failed";
   }
 }
 
diff --git a/runtime/gc/space/image_space.h b/runtime/gc/space/image_space.h
index 81ae724..36889fe 100644
--- a/runtime/gc/space/image_space.h
+++ b/runtime/gc/space/image_space.h
@@ -273,7 +273,6 @@
   // De-initialize the image-space by undoing the effects in Init().
   virtual ~ImageSpace();
 
-  void DisablePreResolvedStrings() REQUIRES_SHARED(Locks::mutator_lock_);
   void ReleaseMetadata() REQUIRES_SHARED(Locks::mutator_lock_);
 
  protected:
diff --git a/runtime/image.cc b/runtime/image.cc
index d91106a..6f88481 100644
--- a/runtime/image.cc
+++ b/runtime/image.cc
@@ -29,7 +29,7 @@
 namespace art {
 
 const uint8_t ImageHeader::kImageMagic[] = { 'a', 'r', 't', '\n' };
-const uint8_t ImageHeader::kImageVersion[] = { '0', '8', '7', '\0' };  // Long.divideUnsigned
+const uint8_t ImageHeader::kImageVersion[] = { '0', '8', '8', '\0' };  // Remove DexCache arrays.
 
 ImageHeader::ImageHeader(uint32_t image_reservation_size,
                          uint32_t component_count,
diff --git a/runtime/image.h b/runtime/image.h
index cdeb79b..61db627 100644
--- a/runtime/image.h
+++ b/runtime/image.h
@@ -258,7 +258,6 @@
     kSectionRuntimeMethods,
     kSectionImTables,
     kSectionIMTConflictTables,
-    kSectionDexCacheArrays,
     kSectionInternedStrings,
     kSectionClassTable,
     kSectionStringReferenceOffsets,
@@ -309,10 +308,6 @@
     return GetImageSection(kSectionIMTConflictTables);
   }
 
-  const ImageSection& GetDexCacheArraysSection() const {
-    return GetImageSection(kSectionDexCacheArrays);
-  }
-
   const ImageSection& GetInternedStringsSection() const {
     return GetImageSection(kSectionInternedStrings);
   }
@@ -509,76 +504,11 @@
  * This type holds the information necessary to fix up AppImage string
  * references.
  *
- * The first element of the pair is an offset into the image space.  If the
- * offset is tagged (testable using HasDexCacheNativeRefTag) it indicates the location
- * of a DexCache object that has one or more native references to managed
- * strings that need to be fixed up.  In this case the second element has no
- * meaningful value.
- *
- * If the first element isn't tagged then it indicates the location of a
- * managed object with a field that needs fixing up.  In this case the second
- * element of the pair is an object-relative offset to the field in question.
+ * The first element indicates the location of a managed object with a field that needs fixing up.
+ * The second element of the pair is an object-relative offset to the field in question.
  */
 typedef std::pair<uint32_t, uint32_t> AppImageReferenceOffsetInfo;
 
-/*
- * Tags the last bit.  Used by AppImage logic to differentiate between pointers
- * to managed objects and pointers to native reference arrays.
- */
-template<typename T>
-T SetDexCacheStringNativeRefTag(T val) {
-  static_assert(std::is_integral<T>::value, "Expected integral type.");
-
-  return val | 1u;
-}
-
-/*
- * Tags the second last bit.  Used by AppImage logic to differentiate between pointers
- * to managed objects and pointers to native reference arrays.
- */
-template<typename T>
-T SetDexCachePreResolvedStringNativeRefTag(T val) {
-  static_assert(std::is_integral<T>::value, "Expected integral type.");
-
-  return val | 2u;
-}
-
-/*
- * Retrieves the value of the last bit.  Used by AppImage logic to
- * differentiate between pointers to managed objects and pointers to native
- * reference arrays.
- */
-template<typename T>
-bool HasDexCacheStringNativeRefTag(T val) {
-  static_assert(std::is_integral<T>::value, "Expected integral type.");
-
-  return (val & 1u) != 0u;
-}
-
-/*
- * Retrieves the value of the second last bit.  Used by AppImage logic to
- * differentiate between pointers to managed objects and pointers to native
- * reference arrays.
- */
-template<typename T>
-bool HasDexCachePreResolvedStringNativeRefTag(T val) {
-  static_assert(std::is_integral<T>::value, "Expected integral type.");
-
-  return (val & 2u) != 0u;
-}
-
-/*
- * Sets the last bit of the value to 0.  Used by AppImage logic to
- * differentiate between pointers to managed objects and pointers to native
- * reference arrays.
- */
-template<typename T>
-T ClearDexCacheNativeRefTags(T val) {
-  static_assert(std::is_integral<T>::value, "Expected integral type.");
-
-  return val & ~3u;
-}
-
 std::ostream& operator<<(std::ostream& os, ImageHeader::ImageMethod method);
 std::ostream& operator<<(std::ostream& os, ImageHeader::ImageRoot root);
 std::ostream& operator<<(std::ostream& os, ImageHeader::ImageSections section);
diff --git a/runtime/mirror/dex_cache-inl.h b/runtime/mirror/dex_cache-inl.h
index 010c5a5..7736f47 100644
--- a/runtime/mirror/dex_cache-inl.h
+++ b/runtime/mirror/dex_cache-inl.h
@@ -282,8 +282,8 @@
 template <typename T>
 NativeDexCachePair<T> DexCache::GetNativePairPtrSize(std::atomic<NativeDexCachePair<T>>* pair_array,
                                                      size_t idx,
-                                                     PointerSize ptr_size) {
-  if (ptr_size == PointerSize::k64) {
+                                                     PointerSize ptr_size ATTRIBUTE_UNUSED) {
+  if (kRuntimePointerSize == PointerSize::k64) {
     auto* array = reinterpret_cast<std::atomic<ConversionPair64>*>(pair_array);
     ConversionPair64 value = AtomicLoadRelaxed16B(&array[idx]);
     return NativeDexCachePair<T>(reinterpret_cast64<T*>(value.first),
@@ -299,8 +299,8 @@
 void DexCache::SetNativePairPtrSize(std::atomic<NativeDexCachePair<T>>* pair_array,
                                     size_t idx,
                                     NativeDexCachePair<T> pair,
-                                    PointerSize ptr_size) {
-  if (ptr_size == PointerSize::k64) {
+                                    PointerSize ptr_size ATTRIBUTE_UNUSED) {
+  if (kRuntimePointerSize == PointerSize::k64) {
     auto* array = reinterpret_cast<std::atomic<ConversionPair64>*>(pair_array);
     ConversionPair64 v(reinterpret_cast64<uint64_t>(pair.object), pair.index);
     AtomicStoreRelease16B(&array[idx], v);
diff --git a/runtime/mirror/dex_cache.cc b/runtime/mirror/dex_cache.cc
index b7adcc2..20f4a40 100644
--- a/runtime/mirror/dex_cache.cc
+++ b/runtime/mirror/dex_cache.cc
@@ -30,88 +30,54 @@
 #include "runtime_globals.h"
 #include "string.h"
 #include "thread.h"
-#include "utils/dex_cache_arrays_layout-inl.h"
 #include "write_barrier.h"
 
 namespace art {
 namespace mirror {
 
-void DexCache::InitializeDexCache(Thread* self,
-                                  ObjPtr<mirror::DexCache> dex_cache,
-                                  ObjPtr<mirror::String> location,
-                                  const DexFile* dex_file,
-                                  LinearAlloc* linear_alloc,
-                                  PointerSize image_pointer_size) {
-  DCHECK(dex_file != nullptr);
+template<typename T>
+static T* AllocArray(Thread* self, LinearAlloc* alloc, size_t num) {
+  if (num == 0) {
+    return nullptr;
+  }
+  return reinterpret_cast<T*>(alloc->AllocAlign16(self, RoundUp(num * sizeof(T), 16)));
+}
+
+void DexCache::InitializeNativeFields(const DexFile* dex_file, LinearAlloc* linear_alloc) {
+  DCHECK(GetDexFile() == nullptr);
+  DCHECK(GetStrings() == nullptr);
+  DCHECK(GetResolvedTypes() == nullptr);
+  DCHECK(GetResolvedMethods() == nullptr);
+  DCHECK(GetResolvedFields() == nullptr);
+  DCHECK(GetResolvedMethodTypes() == nullptr);
+  DCHECK(GetResolvedCallSites() == nullptr);
+
   ScopedAssertNoThreadSuspension sants(__FUNCTION__);
-  DexCacheArraysLayout layout(image_pointer_size, dex_file);
-  uint8_t* raw_arrays = nullptr;
+  Thread* self = Thread::Current();
+  const PointerSize image_pointer_size = kRuntimePointerSize;
 
-  if (dex_file->NumStringIds() != 0u ||
-      dex_file->NumTypeIds() != 0u ||
-      dex_file->NumMethodIds() != 0u ||
-      dex_file->NumFieldIds() != 0u) {
-    static_assert(ArenaAllocator::kAlignment == 8, "Expecting arena alignment of 8.");
-    DCHECK(layout.Alignment() == 8u || layout.Alignment() == 16u);
-    // Zero-initialized.
-    raw_arrays = (layout.Alignment() == 16u)
-        ? reinterpret_cast<uint8_t*>(linear_alloc->AllocAlign16(self, layout.Size()))
-        : reinterpret_cast<uint8_t*>(linear_alloc->Alloc(self, layout.Size()));
-  }
+  size_t num_strings = std::min<size_t>(kDexCacheStringCacheSize, dex_file->NumStringIds());
+  size_t num_types = std::min<size_t>(kDexCacheTypeCacheSize, dex_file->NumTypeIds());
+  size_t num_fields = std::min<size_t>(kDexCacheFieldCacheSize, dex_file->NumFieldIds());
+  size_t num_methods = std::min<size_t>(kDexCacheMethodCacheSize, dex_file->NumMethodIds());
+  size_t num_method_types = std::min<size_t>(kDexCacheMethodTypeCacheSize, dex_file->NumProtoIds());
+  size_t num_call_sites = dex_file->NumCallSiteIds();  // Full size.
 
-  StringDexCacheType* strings = (dex_file->NumStringIds() == 0u) ? nullptr :
-      reinterpret_cast<StringDexCacheType*>(raw_arrays + layout.StringsOffset());
-  TypeDexCacheType* types = (dex_file->NumTypeIds() == 0u) ? nullptr :
-      reinterpret_cast<TypeDexCacheType*>(raw_arrays + layout.TypesOffset());
-  MethodDexCacheType* methods = (dex_file->NumMethodIds() == 0u) ? nullptr :
-      reinterpret_cast<MethodDexCacheType*>(raw_arrays + layout.MethodsOffset());
-  FieldDexCacheType* fields = (dex_file->NumFieldIds() == 0u) ? nullptr :
-      reinterpret_cast<FieldDexCacheType*>(raw_arrays + layout.FieldsOffset());
+  static_assert(ArenaAllocator::kAlignment == 8, "Expecting arena alignment of 8.");
+  StringDexCacheType* strings =
+      AllocArray<StringDexCacheType>(self, linear_alloc, num_strings);
+  TypeDexCacheType* types =
+      AllocArray<TypeDexCacheType>(self, linear_alloc, num_types);
+  MethodDexCacheType* methods =
+      AllocArray<MethodDexCacheType>(self, linear_alloc, num_methods);
+  FieldDexCacheType* fields =
+      AllocArray<FieldDexCacheType>(self, linear_alloc, num_fields);
+  MethodTypeDexCacheType* method_types =
+      AllocArray<MethodTypeDexCacheType>(self, linear_alloc, num_method_types);
+  GcRoot<mirror::CallSite>* call_sites =
+      AllocArray<GcRoot<CallSite>>(self, linear_alloc, num_call_sites);
 
-  size_t num_strings = kDexCacheStringCacheSize;
-  if (dex_file->NumStringIds() < num_strings) {
-    num_strings = dex_file->NumStringIds();
-  }
-  size_t num_types = kDexCacheTypeCacheSize;
-  if (dex_file->NumTypeIds() < num_types) {
-    num_types = dex_file->NumTypeIds();
-  }
-  size_t num_fields = kDexCacheFieldCacheSize;
-  if (dex_file->NumFieldIds() < num_fields) {
-    num_fields = dex_file->NumFieldIds();
-  }
-  size_t num_methods = kDexCacheMethodCacheSize;
-  if (dex_file->NumMethodIds() < num_methods) {
-    num_methods = dex_file->NumMethodIds();
-  }
-
-  // Note that we allocate the method type dex caches regardless of this flag,
-  // and we make sure here that they're not used by the runtime. This is in the
-  // interest of simplicity and to avoid extensive compiler and layout class changes.
-  //
-  // If this needs to be mitigated in a production system running this code,
-  // DexCache::kDexCacheMethodTypeCacheSize can be set to zero.
-  MethodTypeDexCacheType* method_types = nullptr;
-  size_t num_method_types = 0;
-
-  if (dex_file->NumProtoIds() < kDexCacheMethodTypeCacheSize) {
-    num_method_types = dex_file->NumProtoIds();
-  } else {
-    num_method_types = kDexCacheMethodTypeCacheSize;
-  }
-
-  if (num_method_types > 0) {
-    method_types = reinterpret_cast<MethodTypeDexCacheType*>(
-        raw_arrays + layout.MethodTypesOffset());
-  }
-
-  GcRoot<mirror::CallSite>* call_sites = (dex_file->NumCallSiteIds() == 0)
-      ? nullptr
-      : reinterpret_cast<GcRoot<CallSite>*>(raw_arrays + layout.CallSitesOffset());
-
-  DCHECK_ALIGNED(raw_arrays, alignof(StringDexCacheType)) <<
-                 "Expected raw_arrays to align to StringDexCacheType.";
-  DCHECK_ALIGNED(layout.StringsOffset(), alignof(StringDexCacheType)) <<
-                 "Expected StringsOffset() to align to StringDexCacheType.";
+  DCHECK_ALIGNED(types, alignof(TypeDexCacheType)) <<
+                 "Expected types to align to TypeDexCacheType.";
   DCHECK_ALIGNED(strings, alignof(StringDexCacheType)) <<
                  "Expected strings to align to StringDexCacheType.";
@@ -158,9 +124,8 @@
   if (method_types != nullptr) {
     mirror::MethodTypeDexCachePair::Initialize(method_types);
   }
-  dex_cache->Init(dex_file,
-                  location,
-                  strings,
+  SetDexFile(dex_file);
+  SetNativeArrays(strings,
                   num_strings,
                   types,
                   num_types,
@@ -171,7 +136,12 @@
                   method_types,
                   num_method_types,
                   call_sites,
-                  dex_file->NumCallSiteIds());
+                  num_call_sites);
+}
+
+void DexCache::ResetNativeFields() {
+  SetDexFile(nullptr);
+  SetNativeArrays(nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0);
 }
 
 void DexCache::VisitReflectiveTargets(ReflectiveValueVisitor* visitor) {
@@ -238,31 +208,24 @@
   return true;
 }
 
-void DexCache::Init(const DexFile* dex_file,
-                    ObjPtr<String> location,
-                    StringDexCacheType* strings,
-                    uint32_t num_strings,
-                    TypeDexCacheType* resolved_types,
-                    uint32_t num_resolved_types,
-                    MethodDexCacheType* resolved_methods,
-                    uint32_t num_resolved_methods,
-                    FieldDexCacheType* resolved_fields,
-                    uint32_t num_resolved_fields,
-                    MethodTypeDexCacheType* resolved_method_types,
-                    uint32_t num_resolved_method_types,
-                    GcRoot<CallSite>* resolved_call_sites,
-                    uint32_t num_resolved_call_sites) {
-  CHECK(dex_file != nullptr);
-  CHECK(location != nullptr);
+void DexCache::SetNativeArrays(StringDexCacheType* strings,
+                               uint32_t num_strings,
+                               TypeDexCacheType* resolved_types,
+                               uint32_t num_resolved_types,
+                               MethodDexCacheType* resolved_methods,
+                               uint32_t num_resolved_methods,
+                               FieldDexCacheType* resolved_fields,
+                               uint32_t num_resolved_fields,
+                               MethodTypeDexCacheType* resolved_method_types,
+                               uint32_t num_resolved_method_types,
+                               GcRoot<CallSite>* resolved_call_sites,
+                               uint32_t num_resolved_call_sites) {
   CHECK_EQ(num_strings != 0u, strings != nullptr);
   CHECK_EQ(num_resolved_types != 0u, resolved_types != nullptr);
   CHECK_EQ(num_resolved_methods != 0u, resolved_methods != nullptr);
   CHECK_EQ(num_resolved_fields != 0u, resolved_fields != nullptr);
   CHECK_EQ(num_resolved_method_types != 0u, resolved_method_types != nullptr);
   CHECK_EQ(num_resolved_call_sites != 0u, resolved_call_sites != nullptr);
-
-  SetDexFile(dex_file);
-  SetLocation(location);
   SetStrings(strings);
   SetResolvedTypes(resolved_types);
   SetResolvedMethods(resolved_methods);
diff --git a/runtime/mirror/dex_cache.h b/runtime/mirror/dex_cache.h
index 80cca4e..2a16879 100644
--- a/runtime/mirror/dex_cache.h
+++ b/runtime/mirror/dex_cache.h
@@ -186,15 +186,14 @@
     return sizeof(DexCache);
   }
 
-  static void InitializeDexCache(Thread* self,
-                                 ObjPtr<mirror::DexCache> dex_cache,
-                                 ObjPtr<mirror::String> location,
-                                 const DexFile* dex_file,
-                                 LinearAlloc* linear_alloc,
-                                 PointerSize image_pointer_size)
+  // Initialize native fields and allocate memory.
+  void InitializeNativeFields(const DexFile* dex_file, LinearAlloc* linear_alloc)
       REQUIRES_SHARED(Locks::mutator_lock_)
       REQUIRES(Locks::dex_lock_);
 
+  // Clear all native fields.
+  void ResetNativeFields() REQUIRES_SHARED(Locks::mutator_lock_);
+
   template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
   void FixupStrings(StringDexCacheType* dest, const Visitor& visitor)
       REQUIRES_SHARED(Locks::mutator_lock_);
@@ -479,20 +478,18 @@
   void SetClassLoader(ObjPtr<ClassLoader> class_loader) REQUIRES_SHARED(Locks::mutator_lock_);
 
  private:
-  void Init(const DexFile* dex_file,
-            ObjPtr<String> location,
-            StringDexCacheType* strings,
-            uint32_t num_strings,
-            TypeDexCacheType* resolved_types,
-            uint32_t num_resolved_types,
-            MethodDexCacheType* resolved_methods,
-            uint32_t num_resolved_methods,
-            FieldDexCacheType* resolved_fields,
-            uint32_t num_resolved_fields,
-            MethodTypeDexCacheType* resolved_method_types,
-            uint32_t num_resolved_method_types,
-            GcRoot<CallSite>* resolved_call_sites,
-            uint32_t num_resolved_call_sites)
+  void SetNativeArrays(StringDexCacheType* strings,
+                       uint32_t num_strings,
+                       TypeDexCacheType* resolved_types,
+                       uint32_t num_resolved_types,
+                       MethodDexCacheType* resolved_methods,
+                       uint32_t num_resolved_methods,
+                       FieldDexCacheType* resolved_fields,
+                       uint32_t num_resolved_fields,
+                       MethodTypeDexCacheType* resolved_method_types,
+                       uint32_t num_resolved_method_types,
+                       GcRoot<CallSite>* resolved_call_sites,
+                       uint32_t num_resolved_call_sites)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
   // std::pair<> is not trivially copyable and as such it is unsuitable for atomic operations,
diff --git a/runtime/oat.h b/runtime/oat.h
index f43aa11..17d3838 100644
--- a/runtime/oat.h
+++ b/runtime/oat.h
@@ -32,8 +32,8 @@
 class PACKED(4) OatHeader {
  public:
   static constexpr std::array<uint8_t, 4> kOatMagic { { 'o', 'a', 't', '\n' } };
-  // Last oat version changed reason: Deprecation of 'quicken'.
-  static constexpr std::array<uint8_t, 4> kOatVersion { { '1', '8', '8', '\0' } };
+  // Last oat version changed reason: Remove DexCache arrays.
+  static constexpr std::array<uint8_t, 4> kOatVersion { { '1', '8', '9', '\0' } };
 
   static constexpr const char* kDex2OatCmdLineKey = "dex2oat-cmdline";
   static constexpr const char* kDebuggableKey = "debuggable";
diff --git a/runtime/runtime.cc b/runtime/runtime.cc
index f42318b..ac3c392 100644
--- a/runtime/runtime.cc
+++ b/runtime/runtime.cc
@@ -2975,14 +2975,6 @@
     {
       ScopedTrace trace("Releasing app image spaces metadata");
       ScopedObjectAccess soa(Thread::Current());
-      for (gc::space::ContinuousSpace* space : runtime->GetHeap()->GetContinuousSpaces()) {
-        if (space->IsImageSpace()) {
-          gc::space::ImageSpace* image_space = space->AsImageSpace();
-          if (image_space->GetImageHeader().IsAppImage()) {
-            image_space->DisablePreResolvedStrings();
-          }
-        }
-      }
       // Request empty checkpoints to make sure no threads are accessing the image space metadata
       // section when we madvise it. Use GC exclusion to prevent deadlocks that may happen if
       // multiple threads are attempting to run empty checkpoints at the same time.
diff --git a/runtime/utils/dex_cache_arrays_layout-inl.h b/runtime/utils/dex_cache_arrays_layout-inl.h
deleted file mode 100644
index 3512efe..0000000
--- a/runtime/utils/dex_cache_arrays_layout-inl.h
+++ /dev/null
@@ -1,184 +0,0 @@
-/*
- * Copyright (C) 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ART_RUNTIME_UTILS_DEX_CACHE_ARRAYS_LAYOUT_INL_H_
-#define ART_RUNTIME_UTILS_DEX_CACHE_ARRAYS_LAYOUT_INL_H_
-
-#include "dex_cache_arrays_layout.h"
-
-#include <android-base/logging.h>
-
-#include "base/bit_utils.h"
-#include "dex/primitive.h"
-#include "gc_root.h"
-#include "mirror/dex_cache.h"
-#include "runtime_globals.h"
-
-namespace art {
-
-inline DexCacheArraysLayout::DexCacheArraysLayout(PointerSize pointer_size,
-                                                  const DexFile::Header& header,
-                                                  uint32_t num_call_sites)
-    : pointer_size_(pointer_size),
-      /* types_offset_ is always 0u, so it's constexpr */
-      methods_offset_(
-          RoundUp(types_offset_ + TypesSize(header.type_ids_size_), MethodsAlignment())),
-      strings_offset_(
-          RoundUp(methods_offset_ + MethodsSize(header.method_ids_size_), StringsAlignment())),
-      fields_offset_(
-          RoundUp(strings_offset_ + StringsSize(header.string_ids_size_), FieldsAlignment())),
-      method_types_offset_(
-          RoundUp(fields_offset_ + FieldsSize(header.field_ids_size_), MethodTypesAlignment())),
-    call_sites_offset_(
-        RoundUp(method_types_offset_ + MethodTypesSize(header.proto_ids_size_),
-                MethodTypesAlignment())),
-      size_(RoundUp(call_sites_offset_ + CallSitesSize(num_call_sites), Alignment())) {
-}
-
-inline DexCacheArraysLayout::DexCacheArraysLayout(PointerSize pointer_size, const DexFile* dex_file)
-    : DexCacheArraysLayout(pointer_size, dex_file->GetHeader(), dex_file->NumCallSiteIds()) {
-}
-
-inline size_t DexCacheArraysLayout::Alignment() const {
-  return Alignment(pointer_size_);
-}
-
-inline constexpr size_t DexCacheArraysLayout::Alignment(PointerSize pointer_size) {
-  // mirror::Type/String/MethodTypeDexCacheType alignment is 8,
-  // i.e. higher than or equal to the pointer alignment.
-  static_assert(alignof(mirror::TypeDexCacheType) == 8,
-                "Expecting alignof(ClassDexCacheType) == 8");
-  static_assert(alignof(mirror::StringDexCacheType) == 8,
-                "Expecting alignof(StringDexCacheType) == 8");
-  static_assert(alignof(mirror::MethodTypeDexCacheType) == 8,
-                "Expecting alignof(MethodTypeDexCacheType) == 8");
-  // This is the same as alignof({Field,Method}DexCacheType) for the given pointer size.
-  return 2u * static_cast<size_t>(pointer_size);
-}
-
-template <typename T>
-constexpr PointerSize GcRootAsPointerSize() {
-  static_assert(sizeof(GcRoot<T>) == 4U, "Unexpected GcRoot size");
-  return PointerSize::k32;
-}
-
-inline size_t DexCacheArraysLayout::TypeOffset(dex::TypeIndex type_idx) const {
-  return types_offset_ + ElementOffset(PointerSize::k64,
-                                       type_idx.index_ % mirror::DexCache::kDexCacheTypeCacheSize);
-}
-
-inline size_t DexCacheArraysLayout::TypesSize(size_t num_elements) const {
-  size_t cache_size = mirror::DexCache::kDexCacheTypeCacheSize;
-  if (num_elements < cache_size) {
-    cache_size = num_elements;
-  }
-  return PairArraySize(GcRootAsPointerSize<mirror::Class>(), cache_size);
-}
-
-inline size_t DexCacheArraysLayout::TypesAlignment() const {
-  return alignof(GcRoot<mirror::Class>);
-}
-
-inline size_t DexCacheArraysLayout::MethodOffset(uint32_t method_idx) const {
-  return methods_offset_ + ElementOffset(pointer_size_, method_idx);
-}
-
-inline size_t DexCacheArraysLayout::MethodsSize(size_t num_elements) const {
-  size_t cache_size = mirror::DexCache::kDexCacheMethodCacheSize;
-  if (num_elements < cache_size) {
-    cache_size = num_elements;
-  }
-  return PairArraySize(pointer_size_, cache_size);
-}
-
-inline size_t DexCacheArraysLayout::MethodsAlignment() const {
-  return 2u * static_cast<size_t>(pointer_size_);
-}
-
-inline size_t DexCacheArraysLayout::StringOffset(uint32_t string_idx) const {
-  uint32_t string_hash = string_idx % mirror::DexCache::kDexCacheStringCacheSize;
-  return strings_offset_ + ElementOffset(PointerSize::k64, string_hash);
-}
-
-inline size_t DexCacheArraysLayout::StringsSize(size_t num_elements) const {
-  size_t cache_size = mirror::DexCache::kDexCacheStringCacheSize;
-  if (num_elements < cache_size) {
-    cache_size = num_elements;
-  }
-  return PairArraySize(GcRootAsPointerSize<mirror::String>(), cache_size);
-}
-
-inline size_t DexCacheArraysLayout::StringsAlignment() const {
-  static_assert(alignof(mirror::StringDexCacheType) == 8,
-                "Expecting alignof(StringDexCacheType) == 8");
-  return alignof(mirror::StringDexCacheType);
-}
-
-inline size_t DexCacheArraysLayout::FieldOffset(uint32_t field_idx) const {
-  uint32_t field_hash = field_idx % mirror::DexCache::kDexCacheFieldCacheSize;
-  return fields_offset_ + 2u * static_cast<size_t>(pointer_size_) * field_hash;
-}
-
-inline size_t DexCacheArraysLayout::FieldsSize(size_t num_elements) const {
-  size_t cache_size = mirror::DexCache::kDexCacheFieldCacheSize;
-  if (num_elements < cache_size) {
-    cache_size = num_elements;
-  }
-  return PairArraySize(pointer_size_, cache_size);
-}
-
-inline size_t DexCacheArraysLayout::FieldsAlignment() const {
-  return 2u * static_cast<size_t>(pointer_size_);
-}
-
-inline size_t DexCacheArraysLayout::MethodTypesSize(size_t num_elements) const {
-  size_t cache_size = mirror::DexCache::kDexCacheMethodTypeCacheSize;
-  if (num_elements < cache_size) {
-    cache_size = num_elements;
-  }
-
-  return ArraySize(PointerSize::k64, cache_size);
-}
-
-inline size_t DexCacheArraysLayout::MethodTypesAlignment() const {
-  static_assert(alignof(mirror::MethodTypeDexCacheType) == 8,
-                "Expecting alignof(MethodTypeDexCacheType) == 8");
-  return alignof(mirror::MethodTypeDexCacheType);
-}
-
-inline size_t DexCacheArraysLayout::CallSitesSize(size_t num_elements) const {
-  return ArraySize(GcRootAsPointerSize<mirror::CallSite>(), num_elements);
-}
-
-inline size_t DexCacheArraysLayout::CallSitesAlignment() const {
-  return alignof(GcRoot<mirror::CallSite>);
-}
-
-inline size_t DexCacheArraysLayout::ElementOffset(PointerSize element_size, uint32_t idx) {
-  return static_cast<size_t>(element_size) * idx;
-}
-
-inline size_t DexCacheArraysLayout::ArraySize(PointerSize element_size, uint32_t num_elements) {
-  return static_cast<size_t>(element_size) * num_elements;
-}
-
-inline size_t DexCacheArraysLayout::PairArraySize(PointerSize element_size, uint32_t num_elements) {
-  return 2u * static_cast<size_t>(element_size) * num_elements;
-}
-
-}  // namespace art
-
-#endif  // ART_RUNTIME_UTILS_DEX_CACHE_ARRAYS_LAYOUT_INL_H_
diff --git a/runtime/utils/dex_cache_arrays_layout.h b/runtime/utils/dex_cache_arrays_layout.h
deleted file mode 100644
index 6f689f3..0000000
--- a/runtime/utils/dex_cache_arrays_layout.h
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Copyright (C) 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ART_RUNTIME_UTILS_DEX_CACHE_ARRAYS_LAYOUT_H_
-#define ART_RUNTIME_UTILS_DEX_CACHE_ARRAYS_LAYOUT_H_
-
-#include "dex/dex_file.h"
-#include "dex/dex_file_types.h"
-
-namespace art {
-
-/**
- * @class DexCacheArraysLayout
- * @details This class provides the layout information for the type, method, field and
- * string arrays for a DexCache with a fixed arrays' layout (such as in the boot image),
- */
-class DexCacheArraysLayout {
- public:
-  // Construct an invalid layout.
-  DexCacheArraysLayout()
-      : /* types_offset_ is always 0u */
-        pointer_size_(kRuntimePointerSize),
-        methods_offset_(0u),
-        strings_offset_(0u),
-        fields_offset_(0u),
-        method_types_offset_(0u),
-        call_sites_offset_(0u),
-        size_(0u) {
-  }
-
-  // Construct a layout for a particular dex file header.
-  DexCacheArraysLayout(PointerSize pointer_size,
-                       const DexFile::Header& header,
-                       uint32_t num_call_sites);
-
-  // Construct a layout for a particular dex file.
-  DexCacheArraysLayout(PointerSize pointer_size, const DexFile* dex_file);
-
-  bool Valid() const {
-    return Size() != 0u;
-  }
-
-  size_t Size() const {
-    return size_;
-  }
-
-  size_t Alignment() const;
-
-  static constexpr size_t Alignment(PointerSize pointer_size);
-
-  size_t TypesOffset() const {
-    return types_offset_;
-  }
-
-  size_t TypeOffset(dex::TypeIndex type_idx) const;
-
-  size_t TypesSize(size_t num_elements) const;
-
-  size_t TypesAlignment() const;
-
-  size_t MethodsOffset() const {
-    return methods_offset_;
-  }
-
-  size_t MethodOffset(uint32_t method_idx) const;
-
-  size_t MethodsSize(size_t num_elements) const;
-
-  size_t MethodsAlignment() const;
-
-  size_t StringsOffset() const {
-    return strings_offset_;
-  }
-
-  size_t StringOffset(uint32_t string_idx) const;
-
-  size_t StringsSize(size_t num_elements) const;
-
-  size_t StringsAlignment() const;
-
-  size_t FieldsOffset() const {
-    return fields_offset_;
-  }
-
-  size_t FieldOffset(uint32_t field_idx) const;
-
-  size_t FieldsSize(size_t num_elements) const;
-
-  size_t FieldsAlignment() const;
-
-  size_t MethodTypesOffset() const {
-    return method_types_offset_;
-  }
-
-  size_t MethodTypesSize(size_t num_elements) const;
-
-  size_t MethodTypesAlignment() const;
-
-  size_t CallSitesOffset() const {
-    return call_sites_offset_;
-  }
-
-  size_t CallSitesSize(size_t num_elements) const;
-
-  size_t CallSitesAlignment() const;
-
- private:
-  static constexpr size_t types_offset_ = 0u;
-  const PointerSize pointer_size_;  // Must be first for construction initialization order.
-  const size_t methods_offset_;
-  const size_t strings_offset_;
-  const size_t fields_offset_;
-  const size_t method_types_offset_;
-  const size_t call_sites_offset_;
-  const size_t size_;
-
-  static size_t ElementOffset(PointerSize element_size, uint32_t idx);
-
-  static size_t ArraySize(PointerSize element_size, uint32_t num_elements);
-  static size_t PairArraySize(PointerSize element_size, uint32_t num_elements);
-};
-
-}  // namespace art
-
-#endif  // ART_RUNTIME_UTILS_DEX_CACHE_ARRAYS_LAYOUT_H_