Avoid unnecessary read barriers in `ImageWriter`.

Also avoid visiting native roots during the layout and writing stages
(including copy-and-fixup); native roots are handled with other logic.
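
The read barriers are avoided by decoding JNI global and weak global
roots directly from the reference tables, in outline (a sketch
mirroring the helpers added below):

  template <typename MirrorType>
  ObjPtr<MirrorType> DecodeGlobalWithoutRB(JavaVMExt* vm, jobject obj) {
    return ObjPtr<MirrorType>::DownCast(
        vm->globals_.Get<kWithoutReadBarrier>(obj));
  }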

Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Bug: 181943478
Change-Id: I0d62d67556109dbcbd8a04fd0e8408733dacce33
diff --git a/dex2oat/linker/image_writer.cc b/dex2oat/linker/image_writer.cc
index 38a645b..6e7f52e 100644
--- a/dex2oat/linker/image_writer.cc
+++ b/dex2oat/linker/image_writer.cc
@@ -55,8 +55,9 @@
 #include "handle_scope-inl.h"
 #include "image-inl.h"
 #include "imt_conflict_table.h"
+#include "indirect_reference_table-inl.h"
 #include "intern_table-inl.h"
-#include "jni/java_vm_ext.h"
+#include "jni/java_vm_ext-inl.h"
 #include "jni/jni_internal.h"
 #include "linear_alloc.h"
 #include "lock_word.h"
@@ -168,7 +169,7 @@
 // Separate objects into multiple bins to optimize dirty memory use.
 static constexpr bool kBinObjects = true;
 
-ObjPtr<mirror::ObjectArray<mirror::Object>> AllocateBootImageLiveObjects(
+static ObjPtr<mirror::ObjectArray<mirror::Object>> AllocateBootImageLiveObjects(
     Thread* self, Runtime* runtime) REQUIRES_SHARED(Locks::mutator_lock_) {
   ClassLinker* class_linker = runtime->GetClassLinker();
   // The objects used for the Integer.valueOf() intrinsic must remain live even if references
@@ -221,6 +222,20 @@
   return live_objects;
 }
 
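+// Decode a JNI global reference without a read barrier. Used only while the
+// image writer runs single-threaded with GC disallowed, so the reference
+// cannot be updated concurrently.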
+template <typename MirrorType>
+ObjPtr<MirrorType> ImageWriter::DecodeGlobalWithoutRB(JavaVMExt* vm, jobject obj) {
+  DCHECK_EQ(IndirectReferenceTable::GetIndirectRefKind(obj), kGlobal);
+  return ObjPtr<MirrorType>::DownCast(vm->globals_.Get<kWithoutReadBarrier>(obj));
+}
+
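+// Decode a JNI weak global reference without a read barrier. The caller must
+// be allowed to access weak globals, which is DCHECKed below.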
+template <typename MirrorType>
+ObjPtr<MirrorType> ImageWriter::DecodeWeakGlobalWithoutRB(
+    JavaVMExt* vm, Thread* self, jobject obj) {
+  DCHECK_EQ(IndirectReferenceTable::GetIndirectRefKind(obj), kWeakGlobal);
+  DCHECK(vm->MayAccessWeakGlobals(self));
+  return ObjPtr<MirrorType>::DownCast(vm->weak_globals_.Get<kWithoutReadBarrier>(obj));
+}
+
 ObjPtr<mirror::ClassLoader> ImageWriter::GetAppClassLoader() const
     REQUIRES_SHARED(Locks::mutator_lock_) {
   return compiler_options_.IsAppImage()
@@ -297,12 +312,15 @@
     CheckNonImageClassesRemoved();
   }
 
+  // From this point on, there should be no GC, so we should not use unnecessary read barriers.
+  ScopedDebugDisallowReadBarriers sddrb(self);
+
   {
     // All remaining weak interns are referenced. Promote them to strong interns. Whether a
     // string was strongly or weakly interned, we shall make it strongly interned in the image.
     TimingLogger::ScopedTiming t("PromoteInterns", timings);
     ScopedObjectAccess soa(self);
-    Runtime::Current()->GetInternTable()->PromoteWeakToStrong();
+    PromoteWeakInternsToStrong(self);
   }
 
   {
@@ -333,12 +351,23 @@
             sfo_section_base);
 }
 
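+// Returns true if `str` is strongly interned, reading the tables without read barriers.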
+// NO_THREAD_SAFETY_ANALYSIS: Avoid locking the `Locks::intern_table_lock_` while single-threaded.
+bool ImageWriter::IsStronglyInternedString(ObjPtr<mirror::String> str) NO_THREAD_SAFETY_ANALYSIS {
+  InternTable* intern_table = Runtime::Current()->GetInternTable();
+  for (InternTable::Table::InternalTable& table : intern_table->strong_interns_.tables_) {
+    auto it = table.set_.find(GcRoot<mirror::String>(str));
+    if (it != table.set_.end()) {
+      return it->Read<kWithoutReadBarrier>() == str;
+    }
+  }
+  return false;
+}
+
 bool ImageWriter::IsInternedAppImageStringReference(ObjPtr<mirror::Object> referred_obj) const {
   return referred_obj != nullptr &&
          !IsInBootImage(referred_obj.Ptr()) &&
          referred_obj->IsString() &&
-         referred_obj == Runtime::Current()->GetInternTable()->LookupStrong(
-             Thread::Current(), referred_obj->AsString());
+         IsStronglyInternedString(referred_obj->AsString());
 }
 
 // Helper class that erases the image file if it isn't properly flushed and closed.
@@ -406,6 +435,7 @@
   CHECK_EQ(image_filenames.size(), oat_filenames_.size());
 
   Thread* const self = Thread::Current();
+  ScopedDebugDisallowReadBarriers sddrb(self);
   {
     ScopedObjectAccess soa(self);
     for (size_t i = 0; i < oat_filenames_.size(); ++i) {
@@ -638,7 +668,7 @@
       saved_hashcode_map_.insert(std::make_pair(object, lw.GetHashCode()));
       break;
     default:
-      LOG(FATAL) << "Unreachable.";
+      LOG(FATAL) << "UNREACHABLE";
       UNREACHABLE();
   }
   object->SetLockWord(LockWord::FromForwardingAddress(bin_slot.Uint32Value()),
@@ -686,9 +716,10 @@
     // We assume that "regular" bin objects are highly unlikely to become dirtied,
     // so packing them together will not result in a noticeably tighter dirty-to-clean ratio.
     //
-    if (object->IsClass()) {
+    ObjPtr<mirror::Class> klass = object->GetClass<kVerifyNone, kWithoutReadBarrier>();
+    if (klass->IsClassClass()) {
       bin = Bin::kClassVerified;
-      ObjPtr<mirror::Class> klass = object->AsClass();
+      ObjPtr<mirror::Class> as_klass = object->AsClass<kVerifyNone>();
 
       // Move known dirty objects into their own sections. This includes:
       //   - classes with dirty static fields.
@@ -697,21 +728,21 @@
         std::string_view descriptor = k->GetDescriptor(&temp);
         return dirty_image_objects_->find(descriptor) != dirty_image_objects_->end();
       };
-      if (dirty_image_objects_ != nullptr && is_dirty(klass)) {
+      if (dirty_image_objects_ != nullptr && is_dirty(as_klass)) {
         bin = Bin::kKnownDirty;
-      } else if (klass->GetStatus() == ClassStatus::kVisiblyInitialized) {
+      } else if (as_klass->IsVisiblyInitialized<kVerifyNone>()) {
         bin = Bin::kClassInitialized;
 
         // If the class's static fields are all final, put it into a separate bin
         // since it's very likely it will stay clean.
-        uint32_t num_static_fields = klass->NumStaticFields();
+        uint32_t num_static_fields = as_klass->NumStaticFields();
         if (num_static_fields == 0) {
           bin = Bin::kClassInitializedFinalStatics;
         } else {
           // Maybe all the statics are final?
           bool all_final = true;
           for (uint32_t i = 0; i < num_static_fields; ++i) {
-            ArtField* field = klass->GetStaticField(i);
+            ArtField* field = as_klass->GetStaticField(i);
             if (!field->IsFinal()) {
               all_final = false;
               break;
@@ -723,13 +754,16 @@
           }
         }
       }
-    } else if (object->GetClass<kVerifyNone>()->IsStringClass()) {
+    } else if (klass->IsStringClass<kVerifyNone>()) {
       bin = Bin::kString;  // Strings are almost always immutable (except for object header).
-    } else if (object->GetClass<kVerifyNone>() == GetClassRoot<mirror::Object>()) {
+    } else if (!klass->HasSuperClass()) {
+      // Only `j.l.Object` and primitive classes lack a superclass, and there
+      // are no instances of primitive classes.
+      DCHECK(klass->IsObjectClass());
      // Instance of java.lang.Object, probably a lock object. This means it will be dirty when we
       // synchronize on it.
       bin = Bin::kMiscDirty;
-    } else if (object->IsDexCache()) {
+    } else if (klass->IsDexCacheClass<kVerifyNone>()) {
       // Dex file field becomes dirty when the image is loaded.
       bin = Bin::kMiscDirty;
     }
@@ -773,7 +807,7 @@
   if (m->IsNative()) {
     return true;
   }
-  ObjPtr<mirror::Class> declaring_class = m->GetDeclaringClass();
+  ObjPtr<mirror::Class> declaring_class = m->GetDeclaringClass<kWithoutReadBarrier>();
  // Initialized is highly unlikely to dirty since there are no entry points to mutate.
   return declaring_class == nullptr ||
          declaring_class->GetStatus() != ClassStatus::kVisiblyInitialized;
@@ -851,15 +885,6 @@
   return true;
 }
 
-static bool IsBootClassLoaderClass(ObjPtr<mirror::Class> klass)
-    REQUIRES_SHARED(Locks::mutator_lock_) {
-  return klass->GetClassLoader() == nullptr;
-}
-
-bool ImageWriter::IsBootClassLoaderNonImageClass(mirror::Class* klass) {
-  return IsBootClassLoaderClass(klass) && !IsInBootImage(klass);
-}
-
 // This visitor recursively follows the references of an instance and prunes the class
 // if the type of any field is pruned.
 class ImageWriter::PruneObjectReferenceVisitor {
@@ -954,7 +979,7 @@
     return false;
   }
   visited->insert(klass.Ptr());
-  bool result = IsBootClassLoaderClass(klass);
+  bool result = klass->IsBootStrapClassLoaded();
   std::string temp;
  // Prune if not an image class; this handles any broken sets of image classes such as having a
  // class in the set but not its superclass.
@@ -1197,6 +1222,17 @@
   heap->VisitObjects(visitor);
 }
 
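+// Replaces the removed `InternTable::PromoteWeakToStrong()`: promotes all
+// remaining weak interns to strong interns without read barriers.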
+void ImageWriter::PromoteWeakInternsToStrong(Thread* self) {
+  InternTable* intern_table = Runtime::Current()->GetInternTable();
+  MutexLock mu(self, *Locks::intern_table_lock_);
+  DCHECK_EQ(intern_table->weak_interns_.tables_.size(), 1u);
+  for (GcRoot<mirror::String>& entry : intern_table->weak_interns_.tables_.front().set_) {
+    DCHECK(!IsStronglyInternedString(entry.Read<kWithoutReadBarrier>()));
+    intern_table->InsertStrong(entry.Read<kWithoutReadBarrier>());
+  }
+  intern_table->weak_interns_.tables_.front().set_.clear();
+}
+
 void ImageWriter::DumpImageClasses() {
   for (const std::string& image_class : compiler_options_.GetImageClasses()) {
     LOG(INFO) << " " << image_class;
@@ -1234,6 +1270,7 @@
   size_t num_oat_files = oat_filenames_.size();
   dchecked_vector<size_t> dex_cache_counts(num_oat_files, 0u);
   dchecked_vector<DexCacheRecord> dex_cache_records;
+  dex_cache_records.reserve(dex_file_oat_index_map_.size());
   {
     ReaderMutexLock mu(self, *Locks::dex_lock_);
     // Count number of dex caches not in the boot image.
@@ -1327,7 +1364,7 @@
   DCHECK(!klass->IsErroneous()) << klass->GetStatus();
   if (compiler_options_.IsAppImage()) {
     // Extra consistency check: no boot loader classes should be left!
-    CHECK(!IsBootClassLoaderClass(klass)) << klass->PrettyClass();
+    CHECK(!klass->IsBootStrapClassLoaded()) << klass->PrettyClass();
   }
   LengthPrefixedArray<ArtField>* fields[] = {
       klass->GetSFieldsPtr(), klass->GetIFieldsPtr(),
@@ -1501,7 +1538,6 @@
 
  private:
   class CollectClassesVisitor;
-  class CollectRootsVisitor;
   class CollectStringReferenceVisitor;
   class VisitReferencesVisitor;
 
@@ -1533,19 +1569,19 @@
   dchecked_vector<mirror::String*> non_dex_file_interns_;
 };
 
-class ImageWriter::LayoutHelper::CollectClassesVisitor : public ClassVisitor {
+class ImageWriter::LayoutHelper::CollectClassesVisitor {
  public:
   explicit CollectClassesVisitor(ImageWriter* image_writer)
       : image_writer_(image_writer),
         dex_files_(image_writer_->compiler_options_.GetDexFilesForOatFile()) {}
 
-  bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
+  bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
     if (!image_writer_->IsInBootImage(klass.Ptr())) {
       ObjPtr<mirror::Class> component_type = klass;
       size_t dimension = 0u;
-      while (component_type->IsArrayClass()) {
+      while (component_type->IsArrayClass<kVerifyNone>()) {
         ++dimension;
-        component_type = component_type->GetComponentType();
+        component_type = component_type->GetComponentType<kVerifyNone, kWithoutReadBarrier>();
       }
       DCHECK(!component_type->IsProxyClass());
       size_t dex_file_index;
@@ -1596,7 +1632,10 @@
       // Log the non-boot image class count for app image for debugging purposes.
       VLOG(compiler) << "Dex2Oat:AppImage:classCount = " << image_info.class_table_size_;
       // Collect boot image classes referenced by app class loader's class table.
-      ClassTable* app_class_table = image_writer->GetAppClassLoader()->GetClassTable();
+      JavaVMExt* vm = down_cast<JNIEnvExt*>(self->GetJniEnv())->GetVm();
+      auto app_class_loader = DecodeGlobalWithoutRB<mirror::ClassLoader>(
+          vm, image_writer->app_class_loader_);
+      ClassTable* app_class_table = app_class_loader->GetClassTable();
       ReaderMutexLock lock(self, app_class_table->lock_);
       DCHECK_EQ(app_class_table->classes_.size(), 1u);
       const ClassTable::ClassSet& app_class_set = app_class_table->classes_[0];
@@ -1684,30 +1723,10 @@
   };
 
   ImageWriter* const image_writer_;
-  ArrayRef<const DexFile* const> dex_files_;
+  const ArrayRef<const DexFile* const> dex_files_;
   std::deque<ClassEntry> klasses_;
 };
 
-class ImageWriter::LayoutHelper::CollectRootsVisitor {
- public:
-  CollectRootsVisitor() = default;
-
-  std::vector<ObjPtr<mirror::Object>> ReleaseRoots() {
-    std::vector<ObjPtr<mirror::Object>> roots;
-    roots.swap(roots_);
-    return roots;
-  }
-
-  void VisitRootIfNonNull(StackReference<mirror::Object>* ref) {
-    if (!ref->IsNull()) {
-      roots_.push_back(ref->AsMirrorPtr());
-    }
-  }
-
- private:
-  std::vector<ObjPtr<mirror::Object>> roots_;
-};
-
 class ImageWriter::LayoutHelper::CollectStringReferenceVisitor {
  public:
   explicit CollectStringReferenceVisitor(
@@ -1730,7 +1749,7 @@
   void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
       REQUIRES_SHARED(Locks::mutator_lock_)  {
     // Only dex caches have native String roots. These are collected separately.
-    DCHECK(current_obj_->IsDexCache() ||
+    DCHECK((current_obj_->IsDexCache<kVerifyNone, kWithoutReadBarrier>()) ||
            !image_writer_->IsInternedAppImageStringReference(root->AsMirrorPtr()))
         << mirror::Object::PrettyTypeOf(current_obj_);
   }
@@ -1768,26 +1787,22 @@
   VisitReferencesVisitor(LayoutHelper* helper, size_t oat_index)
       : helper_(helper), oat_index_(oat_index) {}
 
-  // Fix up separately since we also need to fix up method entrypoints.
-  ALWAYS_INLINE void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
-      REQUIRES_SHARED(Locks::mutator_lock_) {
-    if (!root->IsNull()) {
-      VisitRoot(root);
-    }
+  // We do not visit native roots. These are handled with other logic.
+  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
+      const {
+    LOG(FATAL) << "UNREACHABLE";
+  }
+  void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {
+    LOG(FATAL) << "UNREACHABLE";
   }
 
-  ALWAYS_INLINE void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
-      REQUIRES_SHARED(Locks::mutator_lock_) {
-    root->Assign(VisitReference(root->AsMirrorPtr()));
-  }
-
-  ALWAYS_INLINE void operator() (ObjPtr<mirror::Object> obj,
-                                 MemberOffset offset,
-                                 bool is_static ATTRIBUTE_UNUSED) const
+  ALWAYS_INLINE void operator()(ObjPtr<mirror::Object> obj,
+                                MemberOffset offset,
+                                bool is_static ATTRIBUTE_UNUSED) const
       REQUIRES_SHARED(Locks::mutator_lock_) {
     mirror::Object* ref =
         obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
-    obj->SetFieldObject</*kTransactionActive*/false>(offset, VisitReference(ref));
+    VisitReference(ref);
   }
 
   ALWAYS_INLINE void operator() (ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
@@ -1797,7 +1812,7 @@
   }
 
  private:
-  mirror::Object* VisitReference(mirror::Object* ref) const REQUIRES_SHARED(Locks::mutator_lock_) {
+  void VisitReference(mirror::Object* ref) const REQUIRES_SHARED(Locks::mutator_lock_) {
     if (helper_->TryAssignBinSlot(ref, oat_index_)) {
       // Remember how many objects we're adding at the front of the queue as we want
       // to reverse that range to process these references in the order of addition.
@@ -1808,7 +1823,6 @@
         helper_->image_writer_->IsInternedAppImageStringReference(ref)) {
       helper_->image_writer_->image_infos_[oat_index_].num_string_references_ += 1u;
     }
-    return ref;
   }
 
   LayoutHelper* const helper_;
@@ -1850,13 +1864,43 @@
 void ImageWriter::LayoutHelper::ProcessDexFileObjects(Thread* self) {
   Runtime* runtime = Runtime::Current();
   ClassLinker* class_linker = runtime->GetClassLinker();
+  const CompilerOptions& compiler_options = image_writer_->compiler_options_;
+  JavaVMExt* vm = down_cast<JNIEnvExt*>(self->GetJniEnv())->GetVm();
 
   // To ensure deterministic output, populate the work queue with objects in a pre-defined order.
   // Note: If we decide to implement a profile-guided layout, this is the place to do so.
 
   // Get initial work queue with the image classes and assign their bin slots.
   CollectClassesVisitor visitor(image_writer_);
-  class_linker->VisitClasses(&visitor);
+  {
+    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
+    if (compiler_options.IsBootImage() || compiler_options.IsBootImageExtension()) {
+      // No need to filter based on class loader, boot class table contains only
+      // classes defined by the boot class loader.
+      ClassTable* class_table = class_linker->boot_class_table_.get();
+      class_table->Visit<kWithoutReadBarrier>(visitor);
+    } else {
+      // No need to visit boot class table as there are no classes there for the app image.
+      for (const ClassLinker::ClassLoaderData& data : class_linker->class_loaders_) {
+        auto class_loader =
+            DecodeWeakGlobalWithoutRB<mirror::ClassLoader>(vm, self, data.weak_root);
+        if (class_loader != nullptr) {
+          ClassTable* class_table = class_loader->GetClassTable();
+          if (class_table != nullptr) {
+            // Visit only classes defined by this class loader (avoids duplicate visits).
+            auto filtering_visitor = [&visitor, class_loader](ObjPtr<mirror::Class> klass)
+                REQUIRES_SHARED(Locks::mutator_lock_) {
+              if (klass->GetClassLoader<kVerifyNone, kWithoutReadBarrier>() == class_loader) {
+                visitor(klass);
+              }
+              return true;
+            };
+            class_table->Visit<kWithoutReadBarrier>(filtering_visitor);
+          }
+        }
+      }
+    }
+  }
   DCHECK(work_queue_.empty());
   work_queue_ = visitor.ProcessCollectedClasses(self);
   for (const std::pair<ObjPtr<mirror::Object>, size_t>& entry : work_queue_) {
@@ -1881,16 +1925,22 @@
   }
 
   // Assign bin slots to dex caches.
-  for (const DexFile* dex_file : image_writer_->compiler_options_.GetDexFilesForOatFile()) {
-    auto it = image_writer_->dex_file_oat_index_map_.find(dex_file);
-    DCHECK(it != image_writer_->dex_file_oat_index_map_.end()) << dex_file->GetLocation();
-    const size_t oat_index = it->second;
-    // Assign bin slot to this file's dex cache and add it to the end of the work queue.
-    ObjPtr<mirror::DexCache> dex_cache = class_linker->FindDexCache(self, *dex_file);
-    DCHECK(dex_cache != nullptr);
-    bool assigned = TryAssignBinSlot(dex_cache, oat_index);
-    DCHECK(assigned);
-    work_queue_.emplace_back(dex_cache, oat_index);
+  {
+    ReaderMutexLock mu(self, *Locks::dex_lock_);
+    for (const DexFile* dex_file : compiler_options.GetDexFilesForOatFile()) {
+      auto it = image_writer_->dex_file_oat_index_map_.find(dex_file);
+      DCHECK(it != image_writer_->dex_file_oat_index_map_.end()) << dex_file->GetLocation();
+      const size_t oat_index = it->second;
+      // Assign bin slot to this file's dex cache and add it to the end of the work queue.
+      auto dcd_it = class_linker->GetDexCachesData().find(dex_file);
+      DCHECK(dcd_it != class_linker->GetDexCachesData().end()) << dex_file->GetLocation();
+      auto dex_cache =
+          DecodeWeakGlobalWithoutRB<mirror::DexCache>(vm, self, dcd_it->second.weak_root);
+      DCHECK(dex_cache != nullptr);
+      bool assigned = TryAssignBinSlot(dex_cache, oat_index);
+      DCHECK(assigned);
+      work_queue_.emplace_back(dex_cache, oat_index);
+    }
   }
 
   // Assign interns to images depending on the first dex file they appear in.
@@ -1913,10 +1963,11 @@
 
   constexpr Bin clean_bin = kBinObjects ? Bin::kInternalClean : Bin::kRegular;
   size_t num_oat_files = image_writer_->oat_filenames_.size();
+  JavaVMExt* vm = down_cast<JNIEnvExt*>(self->GetJniEnv())->GetVm();
   for (size_t oat_index = 0; oat_index != num_oat_files; ++oat_index) {
     // Put image roots and dex caches into `clean_bin`.
-    auto image_roots = ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast(
-        self->DecodeJObject(image_writer_->image_roots_[oat_index]));
+    auto image_roots = DecodeGlobalWithoutRB<mirror::ObjectArray<mirror::Object>>(
+        vm, image_writer_->image_roots_[oat_index]);
     AssignImageBinSlot(image_roots, oat_index, clean_bin);
     work_queue_.emplace_back(image_roots, oat_index);
     // Do not rely on the `work_queue_` for dex cache arrays, it would assign a different bin.
@@ -2099,16 +2150,17 @@
 
 void ImageWriter::LayoutHelper::VerifyImageBinSlotsAssigned() {
   dchecked_vector<mirror::Object*> carveout;
+  JavaVMExt* vm = nullptr;
   if (image_writer_->compiler_options_.IsAppImage()) {
     // Exclude boot class path dex caches that are not part of the boot image.
     // Also exclude their locations if they have not been visited through another path.
     ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
     Thread* self = Thread::Current();
+    vm = down_cast<JNIEnvExt*>(self->GetJniEnv())->GetVm();
     ReaderMutexLock mu(self, *Locks::dex_lock_);
     for (const auto& entry : class_linker->GetDexCachesData()) {
       const ClassLinker::DexCacheData& data = entry.second;
-      ObjPtr<mirror::DexCache> dex_cache =
-          ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
+      auto dex_cache = DecodeWeakGlobalWithoutRB<mirror::DexCache>(vm, self, data.weak_root);
       if (dex_cache == nullptr ||
           image_writer_->IsInBootImage(dex_cache.Ptr()) ||
           ContainsElement(image_writer_->compiler_options_.GetDexFilesForOatFile(),
@@ -2117,7 +2169,7 @@
       }
       CHECK(!image_writer_->IsImageBinSlotAssigned(dex_cache.Ptr()));
       carveout.push_back(dex_cache.Ptr());
-      ObjPtr<mirror::String> location = dex_cache->GetLocation();
+      ObjPtr<mirror::String> location = dex_cache->GetLocation<kVerifyNone, kWithoutReadBarrier>();
       if (!image_writer_->IsImageBinSlotAssigned(location.Ptr())) {
         carveout.push_back(location.Ptr());
       }
@@ -2135,30 +2187,31 @@
         }
         // Ignore finalizer references for the dalvik.system.DexFile objects referenced by
         // the app class loader.
-        if (obj->IsFinalizerReferenceInstance()) {
-          DCHECK(obj->GetClass()->GetSuperClass()->DescriptorEquals("Ljava/lang/ref/Reference;"));
-          ArtField* ref_field = obj->GetClass()->GetSuperClass()->FindDeclaredInstanceField(
+        ObjPtr<mirror::Class> klass = obj->GetClass<kVerifyNone, kWithoutReadBarrier>();
+        if (klass->IsFinalizerReferenceClass<kVerifyNone>()) {
+          ObjPtr<mirror::Class> reference_class =
+              klass->GetSuperClass<kVerifyNone, kWithoutReadBarrier>();
+          DCHECK(reference_class->DescriptorEquals("Ljava/lang/ref/Reference;"));
+          ArtField* ref_field = reference_class->FindDeclaredInstanceField(
               "referent", "Ljava/lang/Object;");
           CHECK(ref_field != nullptr);
-          ObjPtr<mirror::Object> ref = ref_field->GetObject(obj);
+          ObjPtr<mirror::Object> ref = ref_field->GetObject<kWithoutReadBarrier>(obj);
           CHECK(ref != nullptr);
           CHECK(image_writer_->IsImageBinSlotAssigned(ref.Ptr()));
-          ObjPtr<mirror::Class> klass = ref->GetClass();
-          CHECK(klass == WellKnownClasses::ToClass(WellKnownClasses::dalvik_system_DexFile));
+          ObjPtr<mirror::Class> ref_klass = ref->GetClass<kVerifyNone, kWithoutReadBarrier>();
+          CHECK(ref_klass ==
+                DecodeGlobalWithoutRB<mirror::Class>(vm, WellKnownClasses::dalvik_system_DexFile));
           // Note: The app class loader is used only for checking against the runtime
          // class loader; the dex file cookie is cleared and therefore we do not need
           // to run the finalizer even if we implement app image objects collection.
           ArtField* field = jni::DecodeArtField(WellKnownClasses::dalvik_system_DexFile_cookie);
-          CHECK(field->GetObject(ref) == nullptr);
+          CHECK(field->GetObject<kWithoutReadBarrier>(ref) == nullptr);
           return;
         }
-        if (obj->IsString()) {
+        if (klass->IsStringClass()) {
           // Ignore interned strings. These may come from reflection interning method names.
           // TODO: Make dex file strings weak interns and GC them before writing the image.
-          Runtime* runtime = Runtime::Current();
-          ObjPtr<mirror::String> interned =
-              runtime->GetInternTable()->LookupStrong(Thread::Current(), obj->AsString());
-          if (interned == obj) {
+          if (IsStronglyInternedString(obj->AsString())) {
             return;
           }
         }
@@ -2322,7 +2375,7 @@
   size_t old_work_queue_size = work_queue_.size();
   VisitReferencesVisitor visitor(this, oat_index);
   // Walk references and assign bin slots for them.
-  obj->VisitReferences</*kVisitNativeRoots=*/ true, kVerifyNone, kWithoutReadBarrier>(
+  obj->VisitReferences</*kVisitNativeRoots=*/ false, kVerifyNone, kWithoutReadBarrier>(
       visitor,
       visitor);
   // Put the added references in the queue in the order in which they were added.
@@ -2403,10 +2456,11 @@
   // From this point on, there shall be no GC anymore and no objects shall be allocated.
  // We can now assign a BinSlot to each object and store it in its lockword.
 
+  JavaVMExt* vm = down_cast<JNIEnvExt*>(self->GetJniEnv())->GetVm();
   if (compiler_options_.IsBootImage() || compiler_options_.IsBootImageExtension()) {
     // Record the address of boot image live objects.
-    auto image_roots = ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast(
-        self->DecodeJObject(image_roots_[0]));
+    auto image_roots = DecodeGlobalWithoutRB<mirror::ObjectArray<mirror::Object>>(
+        vm, image_roots_[0]);
     boot_image_live_objects_ = ObjPtr<ObjectArray<Object>>::DownCast(
         image_roots->GetWithoutChecks<kVerifyNone, kWithoutReadBarrier>(
             ImageHeader::kBootImageLiveObjects)).Ptr();
@@ -2440,8 +2494,8 @@
 
   size_t oat_index = 0;
   for (ImageInfo& image_info : image_infos_) {
-    auto image_roots = ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast(
-        self->DecodeJObject(image_roots_[oat_index]));
+    auto image_roots = DecodeGlobalWithoutRB<mirror::ObjectArray<mirror::Object>>(
+        vm, image_roots_[oat_index]);
     image_info.image_roots_address_ = PointerToLowMemUInt32(GetImageAddress(image_roots.Ptr()));
     ++oat_index;
   }
@@ -2755,7 +2809,7 @@
         for (size_t i = 0; i != size; ++i) {
           CopyAndFixupReference(
               dest_array->At(i).GetDeclaringClassAddressWithoutBarrier(),
-              src_array->At(i).GetDeclaringClass());
+              src_array->At(i).GetDeclaringClass<kWithoutReadBarrier>());
         }
         break;
       }
@@ -2866,11 +2920,12 @@
   // Therefore we know that this array has not been copied yet.
   mirror::Object* dst = CopyObject</*kCheckIfDone=*/ false>(arr);
   DCHECK(dst != nullptr);
-  DCHECK(arr->IsIntArray() || arr->IsLongArray()) << arr->GetClass()->PrettyClass() << " " << arr;
+  DCHECK(arr->IsIntArray() || arr->IsLongArray())
+      << arr->GetClass<kVerifyNone, kWithoutReadBarrier>()->PrettyClass() << " " << arr;
   // Fixup int and long pointers for the ArtMethod or ArtField arrays.
   const size_t num_elements = arr->GetLength();
-  CopyAndFixupReference(
-      dst->GetFieldObjectReferenceAddr<kVerifyNone>(Class::ClassOffset()), arr->GetClass());
+  CopyAndFixupReference(dst->GetFieldObjectReferenceAddr<kVerifyNone>(Class::ClassOffset()),
+                        arr->GetClass<kVerifyNone, kWithoutReadBarrier>());
   auto* dest_array = down_cast<mirror::PointerArray*>(dst);
   for (size_t i = 0, count = num_elements; i < count; ++i) {
     void* elem = arr->GetElementPtrSize<void*>(i, target_ptr_size_);
@@ -2880,7 +2935,7 @@
         auto* method = reinterpret_cast<ArtMethod*>(elem);
         LOG(FATAL) << "No relocation entry for ArtMethod " << method->PrettyMethod() << " @ "
                    << method << " idx=" << i << "/" << num_elements << " with declaring class "
-                   << Class::PrettyClass(method->GetDeclaringClass());
+                   << Class::PrettyClass(method->GetDeclaringClass<kWithoutReadBarrier>());
         UNREACHABLE();
       }
     }
@@ -2951,15 +3006,18 @@
       : image_writer_(image_writer), copy_(copy) {
   }
 
-  // Ignore class roots since we don't have a way to map them to the destination. These are handled
-  // with other logic.
+  // We do not visit native roots. These are handled with other logic.
   void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
-      const {}
-  void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
+      const {
+    LOG(FATAL) << "UNREACHABLE";
+  }
+  void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {
+    LOG(FATAL) << "UNREACHABLE";
+  }
 
   void operator()(ObjPtr<Object> obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
       REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
-    ObjPtr<Object> ref = obj->GetFieldObject<Object, kVerifyNone>(offset);
+    ObjPtr<Object> ref = obj->GetFieldObject<Object, kVerifyNone, kWithoutReadBarrier>(offset);
     // Copy the reference and record the fixup if necessary.
     image_writer_->CopyAndFixupReference(
         copy_->GetFieldObjectReferenceAddr<kVerifyNone>(offset), ref);
@@ -3084,7 +3142,7 @@
 
 ArtField* ImageWriter::NativeLocationInImage(ArtField* src_field) {
   // Fields are not individually stored in the native relocation map. Use the field array.
-  ObjPtr<mirror::Class> declaring_class = src_field->GetDeclaringClass();
+  ObjPtr<mirror::Class> declaring_class = src_field->GetDeclaringClass<kWithoutReadBarrier>();
   LengthPrefixedArray<ArtField>* src_fields =
       src_field->IsStatic() ? declaring_class->GetSFieldsPtr() : declaring_class->GetIFieldsPtr();
   DCHECK(src_fields != nullptr);
@@ -3117,7 +3175,8 @@
 void ImageWriter::FixupClass(mirror::Class* orig, mirror::Class* copy) {
   orig->FixupNativePointers(copy, target_ptr_size_, NativeLocationVisitor(this));
   FixupClassVisitor visitor(this, copy);
-  ObjPtr<mirror::Object>(orig)->VisitReferences(visitor, visitor);
+  ObjPtr<mirror::Object>(orig)->VisitReferences<
+      /*kVisitNativeRoots=*/ false, kVerifyNone, kWithoutReadBarrier>(visitor, visitor);
 
   if (kBitstringSubtypeCheckEnabled && !compiler_options_.IsBootImage()) {
     // When we call SubtypeCheck::EnsureInitialize, it Assigns new bitstring
@@ -3157,27 +3216,27 @@
   if (kUseBakerReadBarrier) {
     orig->AssertReadBarrierState();
   }
-  if (orig->IsClass()) {
+  ObjPtr<mirror::Class> klass = orig->GetClass<kVerifyNone, kWithoutReadBarrier>();
+  if (klass->IsClassClass()) {
     FixupClass(orig->AsClass<kVerifyNone>().Ptr(), down_cast<mirror::Class*>(copy));
   } else {
     ObjPtr<mirror::ObjectArray<mirror::Class>> class_roots =
-        Runtime::Current()->GetClassLinker()->GetClassRoots();
-    ObjPtr<mirror::Class> klass = orig->GetClass();
-    if (klass == GetClassRoot<mirror::Method>(class_roots) ||
-        klass == GetClassRoot<mirror::Constructor>(class_roots)) {
+        Runtime::Current()->GetClassLinker()->GetClassRoots<kWithoutReadBarrier>();
+    if (klass == GetClassRoot<mirror::Method, kWithoutReadBarrier>(class_roots) ||
+        klass == GetClassRoot<mirror::Constructor, kWithoutReadBarrier>(class_roots)) {
       // Need to update the ArtMethod.
       auto* dest = down_cast<mirror::Executable*>(copy);
       auto* src = down_cast<mirror::Executable*>(orig);
       ArtMethod* src_method = src->GetArtMethod();
       CopyAndFixupPointer(dest, mirror::Executable::ArtMethodOffset(), src_method);
-    } else if (klass == GetClassRoot<mirror::FieldVarHandle>(class_roots) ||
-               klass == GetClassRoot<mirror::StaticFieldVarHandle>(class_roots)) {
+    } else if (klass == GetClassRoot<mirror::FieldVarHandle, kWithoutReadBarrier>(class_roots) ||
+         klass == GetClassRoot<mirror::StaticFieldVarHandle, kWithoutReadBarrier>(class_roots)) {
       // Need to update the ArtField.
       auto* dest = down_cast<mirror::FieldVarHandle*>(copy);
       auto* src = down_cast<mirror::FieldVarHandle*>(orig);
       ArtField* src_field = src->GetArtField();
       CopyAndFixupPointer(dest, mirror::FieldVarHandle::ArtFieldOffset(), src_field);
-    } else if (klass == GetClassRoot<mirror::DexCache>(class_roots)) {
+    } else if (klass == GetClassRoot<mirror::DexCache, kWithoutReadBarrier>(class_roots)) {
       down_cast<mirror::DexCache*>(copy)->SetDexFile(nullptr);
       down_cast<mirror::DexCache*>(copy)->ResetNativeArrays();
     } else if (klass->IsClassLoaderClass()) {
@@ -3191,7 +3250,8 @@
       copy_loader->SetAllocator(nullptr);
     }
     FixupVisitor visitor(this, copy);
-    orig->VisitReferences(visitor, visitor);
+    orig->VisitReferences</*kVisitNativeRoots=*/ false, kVerifyNone, kWithoutReadBarrier>(
+        visitor, visitor);
   }
 }
 
@@ -3246,7 +3306,7 @@
       method->GetEntryPointFromQuickCompiledCodePtrSize(target_ptr_size_);
   const uint8_t* quick_code;
 
-  if (UNLIKELY(IsInBootImage(method->GetDeclaringClass().Ptr()))) {
+  if (UNLIKELY(IsInBootImage(method->GetDeclaringClass<kWithoutReadBarrier>().Ptr()))) {
     DCHECK(method->IsCopied());
     // If the code is not in the oat file corresponding to this image (e.g. default methods)
     quick_code = reinterpret_cast<const uint8_t*>(quick_oat_entry_point);
@@ -3256,7 +3316,7 @@
   }
 
   bool needs_clinit_check = NeedsClinitCheckBeforeCall(method) &&
-      !method->GetDeclaringClass()->IsVisiblyInitialized();
+      !method->GetDeclaringClass<kWithoutReadBarrier>()->IsVisiblyInitialized();
 
   if (quick_code == nullptr) {
     // If we don't have code, use generic jni / interpreter.
@@ -3298,8 +3358,8 @@
 
   memcpy(copy, orig, ArtMethod::Size(target_ptr_size_));
 
-  CopyAndFixupReference(
-      copy->GetDeclaringClassAddressWithoutBarrier(), orig->GetDeclaringClassUnchecked());
+  CopyAndFixupReference(copy->GetDeclaringClassAddressWithoutBarrier(),
+                        orig->GetDeclaringClassUnchecked<kWithoutReadBarrier>());
 
   // OatWriter replaces the code_ with an offset value. Here we re-adjust to a pointer relative to
   // oat_begin_
@@ -3434,13 +3494,13 @@
 
 size_t ImageWriter::GetOatIndexForClass(ObjPtr<mirror::Class> klass) const {
   while (klass->IsArrayClass()) {
-    klass = klass->GetComponentType();
+    klass = klass->GetComponentType<kVerifyNone, kWithoutReadBarrier>();
   }
   if (UNLIKELY(klass->IsPrimitive())) {
-    DCHECK(klass->GetDexCache() == nullptr);
+    DCHECK((klass->GetDexCache<kVerifyNone, kWithoutReadBarrier>()) == nullptr);
     return GetDefaultOatIndex();
   } else {
-    DCHECK(klass->GetDexCache() != nullptr);
+    DCHECK((klass->GetDexCache<kVerifyNone, kWithoutReadBarrier>()) != nullptr);
     return GetOatIndexForDexFile(&klass->GetDexFile());
   }
 }
diff --git a/dex2oat/linker/image_writer.h b/dex2oat/linker/image_writer.h
index 0c1cab8..e5eeacc 100644
--- a/dex2oat/linker/image_writer.h
+++ b/dex2oat/linker/image_writer.h
@@ -38,7 +38,6 @@
 #include "base/macros.h"
 #include "base/mem_map.h"
 #include "base/os.h"
-#include "base/safe_map.h"
 #include "base/utils.h"
 #include "class_table.h"
 #include "gc/accounting/space_bitmap.h"
@@ -69,6 +68,7 @@
 template<class T> class Handle;
 class ImTable;
 class ImtConflictTable;
+class JavaVMExt;
 class TimingLogger;
 
 namespace linker {
@@ -361,9 +361,6 @@
     // Image bitmap which lets us know where the objects inside of the image reside.
     gc::accounting::ContinuousSpaceBitmap image_bitmap_;
 
-    // The start offsets of the dex cache arrays.
-    SafeMap<const DexFile*, size_t> dex_cache_array_starts_;
-
     // Offset from oat_data_begin_ to the stubs.
     uint32_t stub_offsets_[kNumberOfStubTypes] = {};
 
@@ -521,9 +518,6 @@
   void TryAssignConflictTableOffset(ImtConflictTable* table, size_t oat_index)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  // Return true if klass is loaded by the boot class loader but not in the boot image.
-  bool IsBootClassLoaderNonImageClass(mirror::Class* klass) REQUIRES_SHARED(Locks::mutator_lock_);
-
   // Return true if `klass` depends on a class defined by the boot class path
   // we're compiling against but not present in the boot image spaces. We want
   // to prune these classes since we cannot guarantee that they will not be
@@ -537,6 +531,8 @@
                                HashSet<mirror::Object*>* visited)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
+  void PromoteWeakInternsToStrong(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);
+
   bool IsMultiImage() const {
     return image_infos_.size() > 1;
   }
@@ -568,6 +564,14 @@
     return reinterpret_cast<uintptr_t>(obj) - boot_image_begin_ < boot_image_size_;
   }
 
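+  // Decode global/weak global `jobject`s without read barriers. Only usable
+  // while the image writer runs single-threaded with GC disallowed.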
+  template <typename MirrorType>
+  static ObjPtr<MirrorType> DecodeGlobalWithoutRB(JavaVMExt* vm, jobject obj)
+      REQUIRES_SHARED(Locks::mutator_lock_);
+
+  template <typename MirrorType>
+  static ObjPtr<MirrorType> DecodeWeakGlobalWithoutRB(
+      JavaVMExt* vm, Thread* self, jobject obj) REQUIRES_SHARED(Locks::mutator_lock_);
+
   // Get the index of the oat file associated with the object.
   size_t GetOatIndex(mirror::Object* object) const REQUIRES_SHARED(Locks::mutator_lock_);
 
@@ -607,6 +611,10 @@
   void CopyAndFixupPointer(void* object, MemberOffset offset, ValueType src_value)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
+  ALWAYS_INLINE
+  static bool IsStronglyInternedString(ObjPtr<mirror::String> str)
+      REQUIRES_SHARED(Locks::mutator_lock_);
+
   /*
    * Tests an object to see if it will be contained in an AppImage.
    *
diff --git a/runtime/art_field-inl.h b/runtime/art_field-inl.h
index 5ab6d91..5f23f1e 100644
--- a/runtime/art_field-inl.h
+++ b/runtime/art_field-inl.h
@@ -109,14 +109,17 @@
   }
 }
 
-template<class MirrorType>
+template<class MirrorType, ReadBarrierOption kReadBarrierOption>
 inline ObjPtr<MirrorType> ArtField::GetObj(ObjPtr<mirror::Object> object) {
   DCHECK(object != nullptr) << PrettyField();
-  DCHECK(!IsStatic() || (object == GetDeclaringClass()) || !Runtime::Current()->IsStarted());
+  DCHECK(!IsStatic() ||
+         (object == GetDeclaringClass<kReadBarrierOption>()) ||
+         !Runtime::Current()->IsStarted());
   if (UNLIKELY(IsVolatile())) {
-    return object->GetFieldObjectVolatile<MirrorType>(GetOffset());
+    return object->GetFieldObjectVolatile<MirrorType, kDefaultVerifyFlags, kReadBarrierOption>(
+        GetOffset());
   }
-  return object->GetFieldObject<MirrorType>(GetOffset());
+  return object->GetFieldObject<MirrorType, kDefaultVerifyFlags, kReadBarrierOption>(GetOffset());
 }
 
 template<bool kTransactionActive>
@@ -273,9 +276,10 @@
   Set64<kTransactionActive>(object, bits.GetJ());
 }
 
+template<ReadBarrierOption kReadBarrierOption>
 inline ObjPtr<mirror::Object> ArtField::GetObject(ObjPtr<mirror::Object> object) {
   DCHECK_EQ(Primitive::kPrimNot, GetTypeAsPrimitiveType()) << PrettyField();
-  return GetObj(object);
+  return GetObj<mirror::Object, kReadBarrierOption>(object);
 }
 
 template<bool kTransactionActive>
diff --git a/runtime/art_field.h b/runtime/art_field.h
index e2a88e4..c8d8e8c 100644
--- a/runtime/art_field.h
+++ b/runtime/art_field.h
@@ -143,6 +143,7 @@
   template<bool kTransactionActive>
   void SetDouble(ObjPtr<mirror::Object> object, double d) REQUIRES_SHARED(Locks::mutator_lock_);
 
+  template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
   ObjPtr<mirror::Object> GetObject(ObjPtr<mirror::Object> object)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
@@ -163,7 +164,8 @@
   void Set64(ObjPtr<mirror::Object> object, uint64_t new_value)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  template<class MirrorType = mirror::Object>
+  template<class MirrorType = mirror::Object,
+           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
   ObjPtr<MirrorType> GetObj(ObjPtr<mirror::Object> object)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index 9d2755c..c64c137 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -2358,7 +2358,7 @@
   // Cleanup references to single implementation ArtMethods that will be deleted.
   if (cleanup_cha) {
     CHAOnDeleteUpdateClassVisitor visitor(data.allocator);
-    data.class_table->Visit<CHAOnDeleteUpdateClassVisitor, kWithoutReadBarrier>(visitor);
+    data.class_table->Visit<kWithoutReadBarrier>(visitor);
   }
   {
     MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
diff --git a/runtime/class_table-inl.h b/runtime/class_table-inl.h
index 8e44ee3..088ad3d 100644
--- a/runtime/class_table-inl.h
+++ b/runtime/class_table-inl.h
@@ -102,7 +102,7 @@
   }
 }
 
-template <typename Visitor, ReadBarrierOption kReadBarrierOption>
+template <ReadBarrierOption kReadBarrierOption, typename Visitor>
 bool ClassTable::Visit(Visitor& visitor) {
   ReaderMutexLock mu(Thread::Current(), lock_);
   for (ClassSet& class_set : classes_) {
@@ -115,7 +115,7 @@
   return true;
 }
 
-template <typename Visitor, ReadBarrierOption kReadBarrierOption>
+template <ReadBarrierOption kReadBarrierOption, typename Visitor>
 bool ClassTable::Visit(const Visitor& visitor) {
   ReaderMutexLock mu(Thread::Current(), lock_);
   for (ClassSet& class_set : classes_) {
diff --git a/runtime/class_table.h b/runtime/class_table.h
index 3bb212e..17b8786 100644
--- a/runtime/class_table.h
+++ b/runtime/class_table.h
@@ -189,11 +189,11 @@
       REQUIRES_SHARED(Locks::mutator_lock_);
 
   // Stops visit if the visitor returns false.
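+  // Note: `kReadBarrierOption` precedes `Visitor` so that callers can specify
+  // it explicitly while the visitor type is deduced, e.g.
+  // `class_table->Visit<kWithoutReadBarrier>(visitor)`.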
-  template <typename Visitor, ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
+  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
   bool Visit(Visitor& visitor)
       REQUIRES(!lock_)
       REQUIRES_SHARED(Locks::mutator_lock_);
-  template <typename Visitor, ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
+  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
   bool Visit(const Visitor& visitor)
       REQUIRES(!lock_)
       REQUIRES_SHARED(Locks::mutator_lock_);
diff --git a/runtime/intern_table.cc b/runtime/intern_table.cc
index 5d4c507..4da5453 100644
--- a/runtime/intern_table.cc
+++ b/runtime/intern_table.cc
@@ -276,16 +276,6 @@
   return Insert(s, true, true);
 }
 
-void InternTable::PromoteWeakToStrong() {
-  MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
-  DCHECK_EQ(weak_interns_.tables_.size(), 1u);
-  for (GcRoot<mirror::String>& entry : weak_interns_.tables_.front().set_) {
-    DCHECK(LookupStrongLocked(entry.Read()) == nullptr);
-    InsertStrong(entry.Read());
-  }
-  weak_interns_.tables_.front().set_.clear();
-}
-
 ObjPtr<mirror::String> InternTable::InternStrong(ObjPtr<mirror::String> s) {
   return Insert(s, true, false);
 }
diff --git a/runtime/intern_table.h b/runtime/intern_table.h
index 9d1720f..c5fe797 100644
--- a/runtime/intern_table.h
+++ b/runtime/intern_table.h
@@ -121,9 +121,6 @@
   ObjPtr<mirror::String> InternStrongImageString(ObjPtr<mirror::String> s)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  // Only used by image writer. Promote all weak interns to strong interns.
-  void PromoteWeakToStrong() REQUIRES_SHARED(Locks::mutator_lock_);
-
   // Interns a potentially new string in the 'strong' table. May cause thread suspension.
   ObjPtr<mirror::String> InternStrong(const char* utf8_data) REQUIRES_SHARED(Locks::mutator_lock_)
       REQUIRES(!Roles::uninterruptible_);
diff --git a/runtime/jni/java_vm_ext-inl.h b/runtime/jni/java_vm_ext-inl.h
new file mode 100644
index 0000000..29cdf1b
--- /dev/null
+++ b/runtime/jni/java_vm_ext-inl.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_RUNTIME_JNI_JAVA_VM_EXT_INL_H_
+#define ART_RUNTIME_JNI_JAVA_VM_EXT_INL_H_
+
+#include "java_vm_ext.h"
+
+#include "read_barrier_config.h"
+#include "thread-inl.h"
+
+namespace art {
+
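+// Returns true if `self` may currently access weak globals. With the CC
+// collector, weak reference access is tracked per thread; other collectors
+// use the global `allow_accessing_weak_globals_` flag.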
+inline bool JavaVMExt::MayAccessWeakGlobals(Thread* self) const {
+  DCHECK(self != nullptr);
+  return kUseReadBarrier
+      ? self->GetWeakRefAccessEnabled()
+      : allow_accessing_weak_globals_.load(std::memory_order_seq_cst);
+}
+
+}  // namespace art
+
+#endif  // ART_RUNTIME_JNI_JAVA_VM_EXT_INL_H_
diff --git a/runtime/jni/java_vm_ext.cc b/runtime/jni/java_vm_ext.cc
index df7375b..f4a47eb 100644
--- a/runtime/jni/java_vm_ext.cc
+++ b/runtime/jni/java_vm_ext.cc
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-#include "java_vm_ext.h"
+#include "java_vm_ext-inl.h"
 
 #include <dlfcn.h>
 #include <string_view>
@@ -834,17 +834,6 @@
   globals_.Update(ref, result);
 }
 
-inline bool JavaVMExt::MayAccessWeakGlobals(Thread* self) const {
-  return MayAccessWeakGlobalsUnlocked(self);
-}
-
-inline bool JavaVMExt::MayAccessWeakGlobalsUnlocked(Thread* self) const {
-  DCHECK(self != nullptr);
-  return kUseReadBarrier ?
-      self->GetWeakRefAccessEnabled() :
-      allow_accessing_weak_globals_.load(std::memory_order_seq_cst);
-}
-
 ObjPtr<mirror::Object> JavaVMExt::DecodeWeakGlobal(Thread* self, IndirectRef ref) {
   // It is safe to access GetWeakRefAccessEnabled without the lock since CC uses checkpoints to call
   // SetWeakRefAccessEnabled, and the other collectors only modify allow_accessing_weak_globals_
@@ -853,7 +842,7 @@
   // case, it may be racy, this is benign since DecodeWeakGlobalLocked does the correct behavior
   // if MayAccessWeakGlobals is false.
   DCHECK_EQ(IndirectReferenceTable::GetIndirectRefKind(ref), kWeakGlobal);
-  if (LIKELY(MayAccessWeakGlobalsUnlocked(self))) {
+  if (LIKELY(MayAccessWeakGlobals(self))) {
     return weak_globals_.SynchronizedGet(ref);
   }
   MutexLock mu(self, *Locks::jni_weak_globals_lock_);
diff --git a/runtime/jni/java_vm_ext.h b/runtime/jni/java_vm_ext.h
index 50e8414..8fa716e 100644
--- a/runtime/jni/java_vm_ext.h
+++ b/runtime/jni/java_vm_ext.h
@@ -27,6 +27,10 @@
 
 namespace art {
 
+namespace linker {
+class ImageWriter;
+}  // namespace linker
+
 namespace mirror {
 class Array;
 }  // namespace mirror
@@ -219,10 +223,7 @@
   JavaVMExt(Runtime* runtime, const RuntimeArgumentMap& runtime_options, std::string* error_msg);
 
   // Return true if self can currently access weak globals.
-  bool MayAccessWeakGlobalsUnlocked(Thread* self) const REQUIRES_SHARED(Locks::mutator_lock_);
-  bool MayAccessWeakGlobals(Thread* self) const
-      REQUIRES_SHARED(Locks::mutator_lock_)
-      REQUIRES(Locks::jni_weak_globals_lock_);
+  bool MayAccessWeakGlobals(Thread* self) const REQUIRES_SHARED(Locks::mutator_lock_);
 
   void WaitForWeakGlobalsAccess(Thread* self)
       REQUIRES_SHARED(Locks::mutator_lock_)
@@ -281,7 +282,7 @@
   uint32_t global_ref_report_counter_ GUARDED_BY(Locks::jni_globals_lock_)
       = kGlobalRefReportInterval;
 
-
+  friend class linker::ImageWriter;  // Uses `globals_` and `weak_globals_` without read barrier.
   friend IndirectReferenceTable* GetIndirectReferenceTable(ScopedObjectAccess& soa,
                                                            IndirectRefKind kind);
 
diff --git a/runtime/mirror/dex_cache-inl.h b/runtime/mirror/dex_cache-inl.h
index e04aed5..2791fe3 100644
--- a/runtime/mirror/dex_cache-inl.h
+++ b/runtime/mirror/dex_cache-inl.h
@@ -422,8 +422,10 @@
   }
 }
 
+template <VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
 inline ObjPtr<String> DexCache::GetLocation() {
-  return GetFieldObject<String>(OFFSET_OF_OBJECT_MEMBER(DexCache, location_));
+  return GetFieldObject<String, kVerifyFlags, kReadBarrierOption>(
+      OFFSET_OF_OBJECT_MEMBER(DexCache, location_));
 }
 
 }  // namespace mirror
diff --git a/runtime/mirror/dex_cache.h b/runtime/mirror/dex_cache.h
index 12f1985..6701405 100644
--- a/runtime/mirror/dex_cache.h
+++ b/runtime/mirror/dex_cache.h
@@ -197,6 +197,8 @@
   // WARNING: This does not free the memory since it is in LinearAlloc.
   void ResetNativeArrays() REQUIRES_SHARED(Locks::mutator_lock_);
 
+  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
   ObjPtr<String> GetLocation() REQUIRES_SHARED(Locks::mutator_lock_);
 
   static constexpr MemberOffset StringsOffset() {