Revert^2 "Support using opaque JNI ids"

We weren't handling the fact that encoding jmethodIDs could cause OOM
exceptions in some places in reflection.cc. This could lead to
attempting to use a null jmethodID as if it were a real id. This issue
is fixed by the parent CL.

This reverts commit b476a29a2c.
This reverts commit 3b2905366c.
This unreverts commit d5d645ffec.
This unreverts commit 21d5994583.

Reason for revert: Fixed issue causing 004 debuggable flakes
Test: ./test.py --host --jit --debuggable
Bug: 134162467

Change-Id: Iece08ab299cd8a20f8382be7be6c3796858e70eb
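The diff below re-lands the index-based ("opaque") JNI id scheme: when it is enabled,
EncodeArtMethod/EncodeArtField hand out odd values whose upper bits are an index into
per-runtime id tables, while even values remain plain Art{Method,Field} pointers, so the
low bit is enough to tell the two apart when decoding. A minimal standalone sketch of that
tagging (the names below are illustrative only, not the ART declarations):

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Hypothetical stand-in for the JniIdManager id tables; illustrative only.
    static std::vector<void*> gIdTable;

    // Matches IndexToId() in jni_id_manager.cc: odd values carry an index.
    static uintptr_t EncodeIndexId(size_t index) {
      return (index << 1) + 1;
    }

    // Matches the DecodeGenericId() logic: even values are raw pointers,
    // odd values are table lookups (IdToIndex() is simply id >> 1).
    static void* DecodeId(uintptr_t id) {
      return (id & 1) != 0 ? gIdTable[id >> 1] : reinterpret_cast<void*>(id);
    }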
diff --git a/openjdkjvmti/ti_heap.cc b/openjdkjvmti/ti_heap.cc
index 3d99ed8..898363a 100644
--- a/openjdkjvmti/ti_heap.cc
+++ b/openjdkjvmti/ti_heap.cc
@@ -21,12 +21,15 @@
 #include "base/macros.h"
 #include "base/mutex.h"
 #include "class_linker.h"
+#include "class_root.h"
 #include "dex/primitive.h"
 #include "gc/heap-visit-objects-inl.h"
 #include "gc/heap.h"
 #include "gc_root-inl.h"
+#include "handle_scope.h"
 #include "java_frame_root_info.h"
 #include "jni/jni_env_ext.h"
+#include "jni/jni_id_manager.h"
 #include "jni/jni_internal.h"
 #include "jvmti_weak_table-inl.h"
 #include "mirror/class.h"
@@ -36,9 +39,11 @@
 #include "object_tagging.h"
 #include "runtime.h"
 #include "scoped_thread_state_change-inl.h"
+#include "scoped_thread_state_change.h"
 #include "stack.h"
 #include "thread-inl.h"
 #include "thread_list.h"
+#include "well_known_classes.h"
 
 namespace openjdkjvmti {
 
@@ -1378,6 +1383,7 @@
   }
   {
     art::ScopedObjectAccess soa(self);      // Now we know we have the shared lock.
+    art::jni::ScopedEnableSuspendAllJniIdQueries sjni;  // Make sure we can get JNI ids.
     art::ScopedThreadSuspension sts(self, art::kWaitingForVisitObjects);
     art::ScopedSuspendAll ssa("FollowReferences");
 
diff --git a/openjdkjvmti/ti_method.cc b/openjdkjvmti/ti_method.cc
index d8ee981..defcd54 100644
--- a/openjdkjvmti/ti_method.cc
+++ b/openjdkjvmti/ti_method.cc
@@ -93,12 +93,13 @@
       ScopedLocalRef<jthread> thread_jni(
           jnienv, PhaseUtil::IsLivePhase() ? jnienv->AddLocalReference<jthread>(thread->GetPeer())
                                            : nullptr);
+      jmethodID method_id = art::jni::EncodeArtMethod(method);
       art::ScopedThreadSuspension sts(thread, art::ThreadState::kNative);
       event_handler->DispatchEvent<ArtJvmtiEvent::kNativeMethodBind>(
           thread,
           static_cast<JNIEnv*>(jnienv),
           thread_jni.get(),
-          art::jni::EncodeArtMethod(method),
+          method_id,
           const_cast<void*>(cur_method),
           new_method);
     }
diff --git a/runtime/Android.bp b/runtime/Android.bp
index 6b691fd..db4ef02 100644
--- a/runtime/Android.bp
+++ b/runtime/Android.bp
@@ -121,6 +121,7 @@
         "jni/check_jni.cc",
         "jni/java_vm_ext.cc",
         "jni/jni_env_ext.cc",
+        "jni/jni_id_manager.cc",
         "jni/jni_internal.cc",
         "linear_alloc.cc",
         "managed_stack.cc",
@@ -479,6 +480,7 @@
         "jdwp_provider.h",
         "jdwp/jdwp.h",
         "jdwp/jdwp_constants.h",
+        "jni_id_type.h",
         "lock_word.h",
         "oat.h",
         "object_callbacks.h",
diff --git a/runtime/base/locks.cc b/runtime/base/locks.cc
index 1cec44c..7404d0d 100644
--- a/runtime/base/locks.cc
+++ b/runtime/base/locks.cc
@@ -76,6 +76,7 @@
 Mutex* Locks::jni_weak_globals_lock_ = nullptr;
 ReaderWriterMutex* Locks::dex_lock_ = nullptr;
 Mutex* Locks::native_debug_interface_lock_ = nullptr;
+ReaderWriterMutex* Locks::jni_id_lock_ = nullptr;
 std::vector<BaseMutex*> Locks::expected_mutexes_on_weak_ref_access_;
 Atomic<const BaseMutex*> Locks::expected_mutexes_on_weak_ref_access_guard_;
 
@@ -157,6 +158,7 @@
     DCHECK(user_code_suspension_lock_ != nullptr);
     DCHECK(dex_lock_ != nullptr);
     DCHECK(native_debug_interface_lock_ != nullptr);
+    DCHECK(jni_id_lock_ != nullptr);
     DCHECK(runtime_thread_pool_lock_ != nullptr);
   } else {
     // Create global locks in level order from highest lock level to lowest.
@@ -317,6 +319,10 @@
     DCHECK(native_debug_interface_lock_ == nullptr);
     native_debug_interface_lock_ = new Mutex("Native debug interface lock", current_lock_level);
 
+    UPDATE_CURRENT_LOCK_LEVEL(kJniIdLock);
+    DCHECK(jni_id_lock_ == nullptr);
+    jni_id_lock_ = new ReaderWriterMutex("JNI id map lock", current_lock_level);
+
     UPDATE_CURRENT_LOCK_LEVEL(kAbortLock);
     DCHECK(abort_lock_ == nullptr);
     abort_lock_ = new Mutex("abort lock", current_lock_level, true);
diff --git a/runtime/base/locks.h b/runtime/base/locks.h
index 77e304d..4b85df0 100644
--- a/runtime/base/locks.h
+++ b/runtime/base/locks.h
@@ -45,6 +45,7 @@
   kUnexpectedSignalLock,
   kThreadSuspendCountLock,
   kAbortLock,
+  kJniIdLock,
   kNativeDebugInterfaceLock,
   kSignalHandlingLock,
   // A generic lock level for mutexs that should not allow any additional mutexes to be gained after
@@ -353,8 +354,12 @@
   // Guards the magic global variables used by native tools (e.g. libunwind).
   static Mutex* native_debug_interface_lock_ ACQUIRED_AFTER(unexpected_signal_lock_);
 
+  // Guards the data structures responsible for keeping track of the JNI
+  // jmethodID/jfieldID <-> ArtMethod/ArtField mapping when using index-ids.
+  static ReaderWriterMutex* jni_id_lock_ ACQUIRED_AFTER(native_debug_interface_lock_);
+
   // Have an exclusive logging thread.
-  static Mutex* logging_lock_ ACQUIRED_AFTER(native_debug_interface_lock_);
+  static Mutex* logging_lock_ ACQUIRED_AFTER(jni_id_lock_);
 
   // List of mutexes that we expect a thread may hold when accessing weak refs. This is used to
   // avoid a deadlock in the empty checkpoint while weak ref access is disabled (b/34964016). If we
diff --git a/runtime/class_linker_test.cc b/runtime/class_linker_test.cc
index 2a7cbaa..0ffbc4a 100644
--- a/runtime/class_linker_test.cc
+++ b/runtime/class_linker_test.cc
@@ -610,6 +610,8 @@
 
 struct ClassExtOffsets : public CheckOffsets<mirror::ClassExt> {
   ClassExtOffsets() : CheckOffsets<mirror::ClassExt>(false, "Ldalvik/system/ClassExt;") {
+    addOffset(OFFSETOF_MEMBER(mirror::ClassExt, instance_jfield_ids_), "instanceJfieldIDs");
+    addOffset(OFFSETOF_MEMBER(mirror::ClassExt, jmethod_ids_), "jmethodIDs");
     addOffset(OFFSETOF_MEMBER(mirror::ClassExt, obsolete_dex_caches_), "obsoleteDexCaches");
     addOffset(OFFSETOF_MEMBER(mirror::ClassExt, obsolete_methods_), "obsoleteMethods");
     addOffset(OFFSETOF_MEMBER(mirror::ClassExt, original_dex_file_), "originalDexFile");
@@ -617,6 +619,7 @@
               "preRedefineClassDefIndex");
     addOffset(OFFSETOF_MEMBER(mirror::ClassExt, pre_redefine_dex_file_ptr_),
               "preRedefineDexFilePtr");
+    addOffset(OFFSETOF_MEMBER(mirror::ClassExt, static_jfield_ids_), "staticJfieldIDs");
     addOffset(OFFSETOF_MEMBER(mirror::ClassExt, verify_error_), "verifyError");
   }
 };
diff --git a/runtime/jni/jni_env_ext.h b/runtime/jni/jni_env_ext.h
index 61de074..924ff51 100644
--- a/runtime/jni/jni_env_ext.h
+++ b/runtime/jni/jni_env_ext.h
@@ -199,7 +199,7 @@
   // If we are a JNI env for a daemon thread with a deleted runtime.
   bool runtime_deleted_;
 
-  friend class JNI;
+  template<bool kEnableIndexIds> friend class JNI;
   friend class ScopedJniEnvLocalRefState;
   friend class Thread;
   ART_FRIEND_TEST(JniInternalTest, JNIEnvExtOffsets);
diff --git a/runtime/jni/jni_id_manager.cc b/runtime/jni/jni_id_manager.cc
new file mode 100644
index 0000000..16f4d5f
--- /dev/null
+++ b/runtime/jni/jni_id_manager.cc
@@ -0,0 +1,418 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "jni_id_manager.h"
+
+#include "android-base/macros.h"
+#include "art_field-inl.h"
+#include "art_method-inl.h"
+#include "base/enums.h"
+#include "base/globals.h"
+#include "base/locks.h"
+#include "base/mutex.h"
+#include "gc/allocation_listener.h"
+#include "gc/heap.h"
+#include "jni/jni_internal.h"
+#include "mirror/array-inl.h"
+#include "mirror/class-inl.h"
+#include "mirror/class.h"
+#include "mirror/class_ext.h"
+#include "mirror/object-inl.h"
+#include "obj_ptr-inl.h"
+#include "thread-inl.h"
+#include "thread.h"
+#include <algorithm>
+#include <cstdint>
+#include <type_traits>
+
+namespace art {
+namespace jni {
+
+constexpr bool kTraceIds = false;
+
+// TODO This whole thing could be done lock & wait free (since we never remove anything from the
+// ids list). It's not clear this would be worthwhile though.
+
+namespace {
+
+static constexpr size_t IdToIndex(uintptr_t id) {
+  return id >> 1;
+}
+
+static constexpr uintptr_t IndexToId(size_t index) {
+  return (index << 1) + 1;
+}
+
+template <typename ArtType>
+ObjPtr<mirror::PointerArray> GetOrCreateIds(Thread* self,
+                                            ObjPtr<mirror::Class> k,
+                                            ArtType* t,
+                                            /*out*/bool* allocation_failure)
+    REQUIRES_SHARED(Locks::mutator_lock_);
+
+template <>
+ObjPtr<mirror::PointerArray> GetOrCreateIds(Thread* self,
+                                            ObjPtr<mirror::Class> k,
+                                            ArtField* field,
+                                            /*out*/bool* allocation_failure) {
+  ScopedExceptionStorage ses(self);
+  ObjPtr<mirror::PointerArray> res;
+  if (Locks::mutator_lock_->IsExclusiveHeld(self)) {
+    res = field->IsStatic() ? k->GetStaticFieldIds() : k->GetInstanceFieldIds();
+  } else {
+    res = field->IsStatic() ? k->GetOrCreateStaticFieldIds() : k->GetOrCreateInstanceFieldIds();
+  }
+  if (self->IsExceptionPending()) {
+    self->AssertPendingOOMException();
+    ses.SuppressOldException("Failed to allocate maps for jfieldIDs. ");
+    *allocation_failure = true;
+  } else {
+    *allocation_failure = false;
+  }
+  return res;
+}
+
+template <>
+ObjPtr<mirror::PointerArray> GetOrCreateIds(Thread* self,
+                                            ObjPtr<mirror::Class> k,
+                                            ArtMethod* method,
+                                            /*out*/bool* allocation_failure) {
+  if (method->IsObsolete()) {
+    if (kTraceIds) {
+      LOG(INFO) << "jmethodID for Obsolete method " << method->PrettyMethod() << " requested!";
+    }
+    // No ids array for obsolete methods. Just do a linear scan.
+    *allocation_failure = false;
+    return nullptr;
+  }
+  ObjPtr<mirror::PointerArray> res;
+  if (Locks::mutator_lock_->IsExclusiveHeld(self) || !Locks::mutator_lock_->IsSharedHeld(self)) {
+    res = k->GetMethodIds();
+  } else {
+    res = k->GetOrCreateMethodIds();
+  }
+  if (self->IsExceptionPending()) {
+    self->AssertPendingOOMException();
+    *allocation_failure = true;
+  } else {
+    *allocation_failure = false;
+  }
+  return res;
+}
+
+template <typename ArtType>
+size_t GetIdOffset(ObjPtr<mirror::Class> k, ArtType* t, PointerSize pointer_size)
+    REQUIRES_SHARED(Locks::mutator_lock_);
+template <>
+size_t GetIdOffset(ObjPtr<mirror::Class> k, ArtField* f, PointerSize ptr_size ATTRIBUTE_UNUSED) {
+  return f->IsStatic() ? k->GetStaticFieldIdOffset(f) : k->GetInstanceFieldIdOffset(f);
+}
+template <>
+size_t GetIdOffset(ObjPtr<mirror::Class> k, ArtMethod* method, PointerSize pointer_size) {
+  return method->IsObsolete() ? -1 : k->GetMethodIdOffset(method, pointer_size);
+}
+
+// Calls the relevant PrettyMethod/PrettyField on the input.
+template <typename ArtType>
+std::string PrettyGeneric(ArtType* t) REQUIRES_SHARED(Locks::mutator_lock_);
+template <> std::string PrettyGeneric(ArtMethod* f) {
+  return f->PrettyMethod();
+}
+template <> std::string PrettyGeneric(ArtField* f) {
+  return f->PrettyField();
+}
+
+// Checks if the field or method is obsolete.
+template <typename ArtType> bool IsObsolete(ArtType* t) REQUIRES_SHARED(Locks::mutator_lock_);
+template <> bool IsObsolete(ArtField* t ATTRIBUTE_UNUSED) {
+  return false;
+}
+template <> bool IsObsolete(ArtMethod* t) {
+  return t->IsObsolete();
+}
+
+// Get the canonical (non-copied) version of the field or method. Only relevant for methods.
+template <typename ArtType> ArtType* Canonicalize(ArtType* t) REQUIRES_SHARED(Locks::mutator_lock_);
+template <> ArtField* Canonicalize(ArtField* t) {
+  return t;
+}
+template <> ArtMethod* Canonicalize(ArtMethod* t) {
+  if (UNLIKELY(t->IsCopied())) {
+    t = t->GetCanonicalMethod();
+  }
+  return t;
+}
+
+};  // namespace
+
+// We increment the id by 2 each time to allow us to use the LSB as a flag that the ID is an index
+// and not a pointer. This gives us 2**31 unique methods that can be addressed on 32-bit art, which
+// should be more than enough.
+template <> uintptr_t JniIdManager::GetNextId<ArtField>() {
+  uintptr_t res = next_field_id_;
+  next_field_id_ += 2;
+  CHECK_GT(next_field_id_, res) << "jfieldID Overflow";
+  return res;
+}
+
+template <> uintptr_t JniIdManager::GetNextId<ArtMethod>() {
+  uintptr_t res = next_method_id_;
+  next_method_id_ += 2;
+  CHECK_GT(next_method_id_, res) << "jmethodID Overflow";
+  return res;
+}
+template <> std::vector<ArtField*>& JniIdManager::GetGenericMap<ArtField>() {
+  return field_id_map_;
+}
+
+template <> std::vector<ArtMethod*>& JniIdManager::GetGenericMap<ArtMethod>() {
+  return method_id_map_;
+}
+template <> size_t JniIdManager::GetLinearSearchStartId<ArtField>(ArtField* t ATTRIBUTE_UNUSED) {
+  return deferred_allocation_field_id_start_;
+}
+
+template <> size_t JniIdManager::GetLinearSearchStartId<ArtMethod>(ArtMethod* m) {
+  if (m->IsObsolete()) {
+    return 1;
+  } else {
+    return deferred_allocation_method_id_start_;
+  }
+}
+
+template <typename ArtType> uintptr_t JniIdManager::EncodeGenericId(ArtType* t) {
+  if (!Runtime::Current()->JniIdsAreIndices() || t == nullptr) {
+    return reinterpret_cast<uintptr_t>(t);
+  }
+  Thread* self = Thread::Current();
+  ScopedExceptionStorage ses(self);
+  t = Canonicalize(t);
+  ObjPtr<mirror::Class> klass = t->GetDeclaringClass();
+  DCHECK(!klass.IsNull()) << "Null declaring class " << PrettyGeneric(t);
+  size_t off = GetIdOffset(klass, t, kRuntimePointerSize);
+  bool allocation_failure = false;
+  ObjPtr<mirror::PointerArray> ids(GetOrCreateIds(self, klass, t, &allocation_failure));
+  if (allocation_failure) {
+    self->AssertPendingOOMException();
+    ses.SuppressOldException("OOM exception while trying to allocate JNI ids.");
+    return 0u;
+  }
+  uintptr_t cur_id = 0;
+  if (!ids.IsNull()) {
+    DCHECK_GT(ids->GetLength(), static_cast<int32_t>(off)) << " is " << PrettyGeneric(t);
+    cur_id = ids->GetElementPtrSize<uintptr_t>(off, kRuntimePointerSize);
+  }
+  if (cur_id != 0) {
+    return cur_id;
+  }
+  WriterMutexLock mu(self, *Locks::jni_id_lock_);
+  // Check the ids array for a racing id.
+  if (!ids.IsNull()) {
+    cur_id = ids->GetElementPtrSize<uintptr_t>(off, kRuntimePointerSize);
+    if (cur_id != 0) {
+      // We were racing some other thread and lost.
+      return cur_id;
+    }
+  } else {
+    // Either we cannot allocate anything here or we don't have an ids array (we might be an
+    // obsolete method).
+    DCHECK(IsObsolete(t) || deferred_allocation_refcount_ > 0u)
+        << "deferred_allocation_refcount_: " << deferred_allocation_refcount_
+        << " t: " << PrettyGeneric(t);
+    // Check to see if we raced and lost to another thread.
+    const std::vector<ArtType*>& vec = GetGenericMap<ArtType>();
+    bool found = false;
+    // Simple count-while: count the entries before a match (if any) to recover its index.
+    size_t search_start_index = IdToIndex(GetLinearSearchStartId(t));
+    size_t index = std::count_if(vec.cbegin() + search_start_index,
+                                 vec.cend(),
+                                 [&found, t](const ArtType* candidate) {
+                                   found = found || candidate == t;
+                                   return !found;
+                                 }) +
+                   search_start_index;
+    if (found) {
+      // We were either racing some other thread and lost or this thread was asked to encode the
+      // same method multiple times while holding the mutator lock.
+      DCHECK_EQ(vec[index], t) << "Expected: " << PrettyGeneric(vec[index]) << " got "
+                               << PrettyGeneric(t) << " at index " << index
+                               << " (id: " << IndexToId(index) << ").";
+      return IndexToId(index);
+    }
+  }
+  cur_id = GetNextId<ArtType>();
+  size_t cur_index = IdToIndex(cur_id);
+  std::vector<ArtType*>& vec = GetGenericMap<ArtType>();
+  vec.reserve(cur_index + 1);
+  vec.resize(std::max(vec.size(), cur_index + 1), nullptr);
+  vec[cur_index] = t;
+  if (ids.IsNull()) {
+    if (kIsDebugBuild && !IsObsolete(t)) {
+      CHECK_NE(deferred_allocation_refcount_, 0u)
+          << "Failed to allocate ids array despite not being forbidden from doing so!";
+      Locks::mutator_lock_->AssertExclusiveHeld(self);
+    }
+  } else {
+    ids->SetElementPtrSize(off, reinterpret_cast<void*>(cur_id), kRuntimePointerSize);
+  }
+  return cur_id;
+}
+
+jfieldID JniIdManager::EncodeFieldId(ArtField* field) {
+  auto* res = reinterpret_cast<jfieldID>(EncodeGenericId(field));
+  if (kTraceIds && field != nullptr) {
+    LOG(INFO) << "Returning " << res << " for field " << field->PrettyField();
+  }
+  return res;
+}
+jmethodID JniIdManager::EncodeMethodId(ArtMethod* method) {
+  auto* res = reinterpret_cast<jmethodID>(EncodeGenericId(method));
+  if (kTraceIds && method != nullptr) {
+    LOG(INFO) << "Returning " << res << " for method " << method->PrettyMethod();
+  }
+  return res;
+}
+
+template <typename ArtType> ArtType* JniIdManager::DecodeGenericId(uintptr_t t) {
+  if (Runtime::Current()->JniIdsAreIndices() && (t % 2) == 1) {
+    ReaderMutexLock mu(Thread::Current(), *Locks::jni_id_lock_);
+    size_t index = IdToIndex(t);
+    DCHECK_GT(GetGenericMap<ArtType>().size(), index);
+    return GetGenericMap<ArtType>().at(index);
+  } else {
+    DCHECK_EQ((t % 2), 0u) << "id: " << t;
+    return reinterpret_cast<ArtType*>(t);
+  }
+}
+
+ArtMethod* JniIdManager::DecodeMethodId(jmethodID method) {
+  return DecodeGenericId<ArtMethod>(reinterpret_cast<uintptr_t>(method));
+}
+
+ArtField* JniIdManager::DecodeFieldId(jfieldID field) {
+  return DecodeGenericId<ArtField>(reinterpret_cast<uintptr_t>(field));
+}
+
+// This whole defer system is an annoying requirement to allow us to generate IDs during heap-walks
+// such as those required for instrumentation tooling.
+//
+// The defer system works with the normal id-assignment routine to ensure that all the class-ext
+// data structures are eventually created and filled in. Basically the id-assignment function
+// checks whether it holds the mutator-lock exclusively. If it does not, it will try to allocate
+// the class-ext data structures normally and fail if it is unable to do so. When the
+// mutator-lock is held exclusively no attempt to allocate will be made and the thread
+// will CHECK that allocations are being deferred (or that the method is obsolete, in which case
+// there is no class-ext to store the method->id map in).
+//
+// Once the thread is done holding the exclusive mutator-lock it will go back and fill in the
+// class-ext data of all the methods that were added. We do this without the exclusive mutator-lock
+// on a copy of the maps before we decrement the deferred refcount. This ensures that any other
+// threads running at the same time know they need to perform a linear scan of the id-map. Since we
+// don't have the mutator-lock anymore other threads can allocate the class-ext data, meaning our
+// copy is fine. The only way additional methods could end up on the id-maps after our copy without
+// having class-ext data is if another thread picked up the exclusive mutator-lock and added another
+// defer, in which case that thread would fix up the remaining ids. In this way we maintain eventual
+// consistency between the class-ext method/field->id maps and the JniIdManager id->method/field
+// maps.
+//
+// TODO It is possible for another thread to gain the mutator-lock and allocate new ids without
+// calling StartDefer. This is basically a race that we should try to catch, but doing so is
+// rather difficult and, since this defer system is only used in very rare circumstances, it is
+// unlikely to be worth the trouble.
+void JniIdManager::StartDefer() {
+  Thread* self = Thread::Current();
+  WriterMutexLock mu(self, *Locks::jni_id_lock_);
+  if (deferred_allocation_refcount_++ == 0) {
+    deferred_allocation_field_id_start_ = next_field_id_;
+    deferred_allocation_method_id_start_ = next_method_id_;
+  }
+}
+
+void JniIdManager::EndDefer() {
+  // Fixup the method->id map.
+  Thread* self = Thread::Current();
+  auto set_id = [&](auto* t, uintptr_t id) REQUIRES_SHARED(Locks::mutator_lock_) {
+    if (t == nullptr) {
+      return;
+    }
+    ObjPtr<mirror::Class> klass(t->GetDeclaringClass());
+    size_t off = GetIdOffset(klass, t, kRuntimePointerSize);
+    bool alloc_failure = false;
+    ObjPtr<mirror::PointerArray> ids = GetOrCreateIds(self, klass, t, &alloc_failure);
+    CHECK(!alloc_failure) << "Could not allocate jni ids array!";
+    if (ids.IsNull()) {
+      return;
+    }
+    if (kIsDebugBuild) {
+      uintptr_t old_id = ids->GetElementPtrSize<uintptr_t, kRuntimePointerSize>(off);
+      if (old_id != 0) {
+        DCHECK_EQ(old_id, id);
+      }
+    }
+    ids->SetElementPtrSize(off, reinterpret_cast<void*>(id), kRuntimePointerSize);
+  };
+  // To ensure eventual consistency this depends on the fact that the method_id_map_ and
+  // field_id_map_ are the ultimate source of truth and no id is ever reused. It also relies on
+  // all threads always calling StartDefer if they are going to be allocating jni ids while
+  // suspended. If a thread tries to do so without such a scope we could miss ids.
+  // TODO We should use roles or something to verify that this requirement is not broken.
+  //
+  // If another thread comes along and adds more methods to the list after we copy it, then
+  // either (1) the id-maps are already present for the method and everything is fine, (2) the
+  // thread is not suspended and so can create the ext-data and id lists, or (3) the thread also
+  // suspended everything and incremented the deferred_allocation_refcount_ so it will fix up new
+  // ids when it finishes.
+  Locks::mutator_lock_->AssertNotExclusiveHeld(self);
+  Locks::mutator_lock_->AssertSharedHeld(self);
+  std::vector<ArtMethod*> method_id_copy;
+  std::vector<ArtField*> field_id_copy;
+  uintptr_t method_start_id;
+  uintptr_t field_start_id;
+  {
+    ReaderMutexLock mu(self, *Locks::jni_id_lock_);
+    method_id_copy = method_id_map_;
+    field_id_copy = field_id_map_;
+    method_start_id = deferred_allocation_method_id_start_;
+    field_start_id = deferred_allocation_field_id_start_;
+  }
+
+  for (size_t index = kIsDebugBuild ? 0 : IdToIndex(method_start_id); index < method_id_copy.size();
+       ++index) {
+    set_id(method_id_copy[index], IndexToId(index));
+  }
+  for (size_t index = kIsDebugBuild ? 0 : IdToIndex(field_start_id); index < field_id_copy.size();
+       ++index) {
+    set_id(field_id_copy[index], IndexToId(index));
+  }
+  WriterMutexLock mu(self, *Locks::jni_id_lock_);
+  DCHECK_GE(deferred_allocation_refcount_, 1u);
+  if (--deferred_allocation_refcount_ == 0) {
+    deferred_allocation_field_id_start_ = 0;
+    deferred_allocation_method_id_start_ = 0;
+  }
+}
+
+ScopedEnableSuspendAllJniIdQueries::ScopedEnableSuspendAllJniIdQueries()
+    : manager_(Runtime::Current()->GetJniIdManager()) {
+  manager_->StartDefer();
+}
+
+ScopedEnableSuspendAllJniIdQueries::~ScopedEnableSuspendAllJniIdQueries() {
+  manager_->EndDefer();
+}
+
+};  // namespace jni
+};  // namespace art
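The ti_heap.cc hunk at the top of this change shows the intended use of the
ScopedEnableSuspendAllJniIdQueries scope defined above: create it while the mutator lock is
still only shared, before suspending all threads, so that id encoding inside the suspend-all
region defers its class-ext allocations and the destructor fills the maps in afterwards. A
rough sketch of that shape (WalkHeapWithJniIds is a hypothetical caller, not an ART function):

    // Sketch only: assumes an ART-internal context with a valid art::Thread*
    // and the usual runtime headers; it mirrors the FollowReferences hunk above.
    void WalkHeapWithJniIds(art::Thread* self) {
      art::ScopedObjectAccess soa(self);                  // Shared mutator lock.
      art::jni::ScopedEnableSuspendAllJniIdQueries sjni;  // StartDefer().
      art::ScopedThreadSuspension sts(self, art::kWaitingForVisitObjects);
      art::ScopedSuspendAll ssa("WalkHeapWithJniIds");
      // ... visit the heap; EncodeMethodId()/EncodeFieldId() may be called here
      // even though allocating the class-ext id arrays is impossible ...
    }  // Destructors resume all threads, then EndDefer() fills in class-ext data.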
diff --git a/runtime/jni/jni_id_manager.h b/runtime/jni/jni_id_manager.h
new file mode 100644
index 0000000..5e5c581
--- /dev/null
+++ b/runtime/jni/jni_id_manager.h
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_RUNTIME_JNI_JNI_ID_MANAGER_H_
+#define ART_RUNTIME_JNI_JNI_ID_MANAGER_H_
+
+#include <atomic>
+#include <jni.h>
+#include <vector>
+
+#include "art_field.h"
+#include "art_method.h"
+#include "base/mutex.h"
+
+namespace art {
+namespace jni {
+
+class ScopedEnableSuspendAllJniIdQueries;
+class JniIdManager {
+ public:
+  ArtMethod* DecodeMethodId(jmethodID method) REQUIRES(!Locks::jni_id_lock_);
+  ArtField* DecodeFieldId(jfieldID field) REQUIRES(!Locks::jni_id_lock_);
+  jmethodID EncodeMethodId(ArtMethod* method) REQUIRES(!Locks::jni_id_lock_)
+      REQUIRES_SHARED(Locks::mutator_lock_);
+  jfieldID EncodeFieldId(ArtField* field) REQUIRES(!Locks::jni_id_lock_)
+      REQUIRES_SHARED(Locks::mutator_lock_);
+
+ private:
+  template <typename ArtType>
+  uintptr_t EncodeGenericId(ArtType* t) REQUIRES(!Locks::jni_id_lock_)
+      REQUIRES_SHARED(Locks::mutator_lock_);
+  template <typename ArtType>
+  ArtType* DecodeGenericId(uintptr_t input) REQUIRES(!Locks::jni_id_lock_);
+  template <typename ArtType> std::vector<ArtType*>& GetGenericMap() REQUIRES(Locks::jni_id_lock_);
+  template <typename ArtType> uintptr_t GetNextId() REQUIRES(Locks::jni_id_lock_);
+  template <typename ArtType>
+  size_t GetLinearSearchStartId(ArtType* t) REQUIRES(Locks::jni_id_lock_);
+
+  void StartDefer() REQUIRES(!Locks::jni_id_lock_) REQUIRES_SHARED(Locks::mutator_lock_);
+  void EndDefer() REQUIRES(!Locks::jni_id_lock_) REQUIRES_SHARED(Locks::mutator_lock_);
+
+  uintptr_t next_method_id_ GUARDED_BY(Locks::jni_id_lock_) = 1u;
+  std::vector<ArtMethod*> method_id_map_ GUARDED_BY(Locks::jni_id_lock_);
+  uintptr_t next_field_id_ GUARDED_BY(Locks::jni_id_lock_) = 1u;
+  std::vector<ArtField*> field_id_map_ GUARDED_BY(Locks::jni_id_lock_);
+
+  // If non-zero, indicates that some thread is trying to allocate ids without being able to
+  // update the method->id mapping (for example because allocation is currently impossible). In
+  // this case decode and encode need to do a linear scan of the lists. The
+  // ScopedEnableSuspendAllJniIdQueries struct will deal with fixing everything up.
+  size_t deferred_allocation_refcount_ GUARDED_BY(Locks::jni_id_lock_) = 0;
+  // Minimum jmethodID that might not have its method->id mapping filled in.
+  uintptr_t deferred_allocation_method_id_start_ GUARDED_BY(Locks::jni_id_lock_) = 0u;
+  // Minimum jfieldID that might not have its field->id mapping filled in.
+  uintptr_t deferred_allocation_field_id_start_ GUARDED_BY(Locks::jni_id_lock_) = 0u;
+
+  friend class ScopedEnableSuspendAllJniIdQueries;
+};
+
+// A scope that will enable using the Encode/Decode JNI id functions with all threads suspended.
+// This is required since normally we need to be able to allocate to encode new ids. This should
+// only be used when absolutely required, for example to invoke user-callbacks during heap walking
+// or similar.
+class ScopedEnableSuspendAllJniIdQueries {
+ public:
+  ScopedEnableSuspendAllJniIdQueries() REQUIRES_SHARED(Locks::mutator_lock_);
+  ~ScopedEnableSuspendAllJniIdQueries() REQUIRES_SHARED(Locks::mutator_lock_);
+
+ private:
+  JniIdManager* manager_;
+};
+
+}  // namespace jni
+}  // namespace art
+
+#endif  // ART_RUNTIME_JNI_JNI_ID_MANAGER_H_
diff --git a/runtime/jni/jni_internal.cc b/runtime/jni/jni_internal.cc
index af335f6..cd775d6 100644
--- a/runtime/jni/jni_internal.cc
+++ b/runtime/jni/jni_internal.cc
@@ -428,6 +428,7 @@
   return h_klass.Get();
 }
 
+template<bool kEnableIndexIds>
 static jmethodID FindMethodID(ScopedObjectAccess& soa, jclass jni_class,
                               const char* name, const char* sig, bool is_static)
     REQUIRES_SHARED(Locks::mutator_lock_) {
@@ -449,14 +450,16 @@
     ThrowNoSuchMethodError(soa, c, name, sig, is_static ? "static" : "non-static");
     return nullptr;
   }
-  return jni::EncodeArtMethod(method);
+  return jni::EncodeArtMethod<kEnableIndexIds>(method);
 }
 
+template<bool kEnableIndexIds>
 static ObjPtr<mirror::ClassLoader> GetClassLoader(const ScopedObjectAccess& soa)
     REQUIRES_SHARED(Locks::mutator_lock_) {
   ArtMethod* method = soa.Self()->GetCurrentMethod(nullptr);
   // If we are running Runtime.nativeLoad, use the overriding ClassLoader it set.
-  if (method == jni::DecodeArtMethod(WellKnownClasses::java_lang_Runtime_nativeLoad)) {
+  if (method ==
+      jni::DecodeArtMethod<kEnableIndexIds>(WellKnownClasses::java_lang_Runtime_nativeLoad)) {
     return soa.Decode<mirror::ClassLoader>(soa.Self()->GetClassLoaderOverride());
   }
   // If we have a method, use its ClassLoader for context.
@@ -482,6 +485,7 @@
   return nullptr;
 }
 
+template<bool kEnableIndexIds>
 static jfieldID FindFieldID(const ScopedObjectAccess& soa, jclass jni_class, const char* name,
                             const char* sig, bool is_static)
     REQUIRES_SHARED(Locks::mutator_lock_) {
@@ -534,7 +538,7 @@
                                    sig, name, c->GetDescriptor(&temp));
     return nullptr;
   }
-  return jni::EncodeArtField(field);
+  return jni::EncodeArtField<kEnableIndexIds>(field);
 }
 
 static void ThrowAIOOBE(ScopedObjectAccess& soa,
@@ -641,6 +645,7 @@
   return nullptr;
 }
 
+template <bool kEnableIndexIds>
 class JNI {
  public:
   static jint GetVersion(JNIEnv*) {
@@ -661,7 +666,7 @@
     ObjPtr<mirror::Class> c = nullptr;
     if (runtime->IsStarted()) {
       StackHandleScope<1> hs(soa.Self());
-      Handle<mirror::ClassLoader> class_loader(hs.NewHandle(GetClassLoader(soa)));
+      Handle<mirror::ClassLoader> class_loader(hs.NewHandle(GetClassLoader<kEnableIndexIds>(soa)));
       c = class_linker->FindClass(soa.Self(), descriptor.c_str(), class_loader);
     } else {
       c = class_linker->FindSystemClass(soa.Self(), descriptor.c_str());
@@ -672,7 +677,7 @@
   static jmethodID FromReflectedMethod(JNIEnv* env, jobject jlr_method) {
     CHECK_NON_NULL_ARGUMENT(jlr_method);
     ScopedObjectAccess soa(env);
-    return jni::EncodeArtMethod(ArtMethod::FromReflectedMethod(soa, jlr_method));
+    return jni::EncodeArtMethod<kEnableIndexIds>(ArtMethod::FromReflectedMethod(soa, jlr_method));
   }
 
   static jfieldID FromReflectedField(JNIEnv* env, jobject jlr_field) {
@@ -684,7 +689,7 @@
       return nullptr;
     }
     ObjPtr<mirror::Field> field = ObjPtr<mirror::Field>::DownCast(obj_field);
-    return jni::EncodeArtField(field->GetArtField());
+    return jni::EncodeArtField<kEnableIndexIds>(field->GetArtField());
   }
 
   static jobject ToReflectedMethod(JNIEnv* env, jclass, jmethodID mid, jboolean) {
@@ -935,7 +940,7 @@
     }
     if (c->IsStringClass()) {
       // Replace calls to String.<init> with equivalent StringFactory call.
-      jmethodID sf_mid = jni::EncodeArtMethod(
+      jmethodID sf_mid = jni::EncodeArtMethod<kEnableIndexIds>(
           WellKnownClasses::StringInitToStringFactory(jni::DecodeArtMethod(mid)));
       return CallStaticObjectMethodV(env, WellKnownClasses::java_lang_StringFactory, sf_mid, args);
     }
@@ -962,7 +967,7 @@
     }
     if (c->IsStringClass()) {
       // Replace calls to String.<init> with equivalent StringFactory call.
-      jmethodID sf_mid = jni::EncodeArtMethod(
+      jmethodID sf_mid = jni::EncodeArtMethod<kEnableIndexIds>(
           WellKnownClasses::StringInitToStringFactory(jni::DecodeArtMethod(mid)));
       return CallStaticObjectMethodA(env, WellKnownClasses::java_lang_StringFactory, sf_mid, args);
     }
@@ -983,7 +988,7 @@
     CHECK_NON_NULL_ARGUMENT(name);
     CHECK_NON_NULL_ARGUMENT(sig);
     ScopedObjectAccess soa(env);
-    return FindMethodID(soa, java_class, name, sig, false);
+    return FindMethodID<kEnableIndexIds>(soa, java_class, name, sig, false);
   }
 
   static jmethodID GetStaticMethodID(JNIEnv* env, jclass java_class, const char* name,
@@ -992,7 +997,7 @@
     CHECK_NON_NULL_ARGUMENT(name);
     CHECK_NON_NULL_ARGUMENT(sig);
     ScopedObjectAccess soa(env);
-    return FindMethodID(soa, java_class, name, sig, true);
+    return FindMethodID<kEnableIndexIds>(soa, java_class, name, sig, true);
   }
 
   static jobject CallObjectMethod(JNIEnv* env, jobject obj, jmethodID mid, ...) {
@@ -1524,7 +1529,7 @@
     CHECK_NON_NULL_ARGUMENT(name);
     CHECK_NON_NULL_ARGUMENT(sig);
     ScopedObjectAccess soa(env);
-    return FindFieldID(soa, java_class, name, sig, false);
+    return FindFieldID<kEnableIndexIds>(soa, java_class, name, sig, false);
   }
 
   static jfieldID GetStaticFieldID(JNIEnv* env, jclass java_class, const char* name,
@@ -1533,14 +1538,14 @@
     CHECK_NON_NULL_ARGUMENT(name);
     CHECK_NON_NULL_ARGUMENT(sig);
     ScopedObjectAccess soa(env);
-    return FindFieldID(soa, java_class, name, sig, true);
+    return FindFieldID<kEnableIndexIds>(soa, java_class, name, sig, true);
   }
 
   static jobject GetObjectField(JNIEnv* env, jobject obj, jfieldID fid) {
     CHECK_NON_NULL_ARGUMENT(obj);
     CHECK_NON_NULL_ARGUMENT(fid);
     ScopedObjectAccess soa(env);
-    ArtField* f = jni::DecodeArtField(fid);
+    ArtField* f = jni::DecodeArtField<kEnableIndexIds>(fid);
     NotifyGetField(f, obj);
     ObjPtr<mirror::Object> o = soa.Decode<mirror::Object>(obj);
     return soa.AddLocalReference<jobject>(f->GetObject(o));
@@ -1549,7 +1554,7 @@
   static jobject GetStaticObjectField(JNIEnv* env, jclass, jfieldID fid) {
     CHECK_NON_NULL_ARGUMENT(fid);
     ScopedObjectAccess soa(env);
-    ArtField* f = jni::DecodeArtField(fid);
+    ArtField* f = jni::DecodeArtField<kEnableIndexIds>(fid);
     NotifyGetField(f, nullptr);
     return soa.AddLocalReference<jobject>(f->GetObject(f->GetDeclaringClass()));
   }
@@ -1558,7 +1563,7 @@
     CHECK_NON_NULL_ARGUMENT_RETURN_VOID(java_object);
     CHECK_NON_NULL_ARGUMENT_RETURN_VOID(fid);
     ScopedObjectAccess soa(env);
-    ArtField* f = jni::DecodeArtField(fid);
+    ArtField* f = jni::DecodeArtField<kEnableIndexIds>(fid);
     NotifySetObjectField(f, java_object, java_value);
     ObjPtr<mirror::Object> o = soa.Decode<mirror::Object>(java_object);
     ObjPtr<mirror::Object> v = soa.Decode<mirror::Object>(java_value);
@@ -1568,7 +1573,7 @@
   static void SetStaticObjectField(JNIEnv* env, jclass, jfieldID fid, jobject java_value) {
     CHECK_NON_NULL_ARGUMENT_RETURN_VOID(fid);
     ScopedObjectAccess soa(env);
-    ArtField* f = jni::DecodeArtField(fid);
+    ArtField* f = jni::DecodeArtField<kEnableIndexIds>(fid);
     NotifySetObjectField(f, nullptr, java_value);
     ObjPtr<mirror::Object> v = soa.Decode<mirror::Object>(java_value);
     f->SetObject<false>(f->GetDeclaringClass(), v);
@@ -1578,7 +1583,7 @@
   CHECK_NON_NULL_ARGUMENT_RETURN_ZERO(instance); \
   CHECK_NON_NULL_ARGUMENT_RETURN_ZERO(fid); \
   ScopedObjectAccess soa(env); \
-  ArtField* f = jni::DecodeArtField(fid); \
+  ArtField* f = jni::DecodeArtField<kEnableIndexIds>(fid); \
   NotifyGetField(f, instance); \
   ObjPtr<mirror::Object> o = soa.Decode<mirror::Object>(instance); \
   return f->Get ##fn (o)
@@ -1586,7 +1591,7 @@
 #define GET_STATIC_PRIMITIVE_FIELD(fn) \
   CHECK_NON_NULL_ARGUMENT_RETURN_ZERO(fid); \
   ScopedObjectAccess soa(env); \
-  ArtField* f = jni::DecodeArtField(fid); \
+  ArtField* f = jni::DecodeArtField<kEnableIndexIds>(fid); \
   NotifyGetField(f, nullptr); \
   return f->Get ##fn (f->GetDeclaringClass())
 
@@ -1594,7 +1599,7 @@
   CHECK_NON_NULL_ARGUMENT_RETURN_VOID(instance); \
   CHECK_NON_NULL_ARGUMENT_RETURN_VOID(fid); \
   ScopedObjectAccess soa(env); \
-  ArtField* f = jni::DecodeArtField(fid); \
+  ArtField* f = jni::DecodeArtField<kEnableIndexIds>(fid); \
   NotifySetPrimitiveField(f, instance, JValue::FromPrimitive<decltype(value)>(value)); \
   ObjPtr<mirror::Object> o = soa.Decode<mirror::Object>(instance); \
   f->Set ##fn <false>(o, value)
@@ -1602,7 +1607,7 @@
 #define SET_STATIC_PRIMITIVE_FIELD(fn, value) \
   CHECK_NON_NULL_ARGUMENT_RETURN_VOID(fid); \
   ScopedObjectAccess soa(env); \
-  ArtField* f = jni::DecodeArtField(fid); \
+  ArtField* f = jni::DecodeArtField<kEnableIndexIds>(fid); \
   NotifySetPrimitiveField(f, nullptr, JValue::FromPrimitive<decltype(value)>(value)); \
   f->Set ##fn <false>(f->GetDeclaringClass(), value)
 
@@ -2881,244 +2886,253 @@
   }
 };
 
-const JNINativeInterface gJniNativeInterface = {
-  nullptr,  // reserved0.
-  nullptr,  // reserved1.
-  nullptr,  // reserved2.
-  nullptr,  // reserved3.
-  JNI::GetVersion,
-  JNI::DefineClass,
-  JNI::FindClass,
-  JNI::FromReflectedMethod,
-  JNI::FromReflectedField,
-  JNI::ToReflectedMethod,
-  JNI::GetSuperclass,
-  JNI::IsAssignableFrom,
-  JNI::ToReflectedField,
-  JNI::Throw,
-  JNI::ThrowNew,
-  JNI::ExceptionOccurred,
-  JNI::ExceptionDescribe,
-  JNI::ExceptionClear,
-  JNI::FatalError,
-  JNI::PushLocalFrame,
-  JNI::PopLocalFrame,
-  JNI::NewGlobalRef,
-  JNI::DeleteGlobalRef,
-  JNI::DeleteLocalRef,
-  JNI::IsSameObject,
-  JNI::NewLocalRef,
-  JNI::EnsureLocalCapacity,
-  JNI::AllocObject,
-  JNI::NewObject,
-  JNI::NewObjectV,
-  JNI::NewObjectA,
-  JNI::GetObjectClass,
-  JNI::IsInstanceOf,
-  JNI::GetMethodID,
-  JNI::CallObjectMethod,
-  JNI::CallObjectMethodV,
-  JNI::CallObjectMethodA,
-  JNI::CallBooleanMethod,
-  JNI::CallBooleanMethodV,
-  JNI::CallBooleanMethodA,
-  JNI::CallByteMethod,
-  JNI::CallByteMethodV,
-  JNI::CallByteMethodA,
-  JNI::CallCharMethod,
-  JNI::CallCharMethodV,
-  JNI::CallCharMethodA,
-  JNI::CallShortMethod,
-  JNI::CallShortMethodV,
-  JNI::CallShortMethodA,
-  JNI::CallIntMethod,
-  JNI::CallIntMethodV,
-  JNI::CallIntMethodA,
-  JNI::CallLongMethod,
-  JNI::CallLongMethodV,
-  JNI::CallLongMethodA,
-  JNI::CallFloatMethod,
-  JNI::CallFloatMethodV,
-  JNI::CallFloatMethodA,
-  JNI::CallDoubleMethod,
-  JNI::CallDoubleMethodV,
-  JNI::CallDoubleMethodA,
-  JNI::CallVoidMethod,
-  JNI::CallVoidMethodV,
-  JNI::CallVoidMethodA,
-  JNI::CallNonvirtualObjectMethod,
-  JNI::CallNonvirtualObjectMethodV,
-  JNI::CallNonvirtualObjectMethodA,
-  JNI::CallNonvirtualBooleanMethod,
-  JNI::CallNonvirtualBooleanMethodV,
-  JNI::CallNonvirtualBooleanMethodA,
-  JNI::CallNonvirtualByteMethod,
-  JNI::CallNonvirtualByteMethodV,
-  JNI::CallNonvirtualByteMethodA,
-  JNI::CallNonvirtualCharMethod,
-  JNI::CallNonvirtualCharMethodV,
-  JNI::CallNonvirtualCharMethodA,
-  JNI::CallNonvirtualShortMethod,
-  JNI::CallNonvirtualShortMethodV,
-  JNI::CallNonvirtualShortMethodA,
-  JNI::CallNonvirtualIntMethod,
-  JNI::CallNonvirtualIntMethodV,
-  JNI::CallNonvirtualIntMethodA,
-  JNI::CallNonvirtualLongMethod,
-  JNI::CallNonvirtualLongMethodV,
-  JNI::CallNonvirtualLongMethodA,
-  JNI::CallNonvirtualFloatMethod,
-  JNI::CallNonvirtualFloatMethodV,
-  JNI::CallNonvirtualFloatMethodA,
-  JNI::CallNonvirtualDoubleMethod,
-  JNI::CallNonvirtualDoubleMethodV,
-  JNI::CallNonvirtualDoubleMethodA,
-  JNI::CallNonvirtualVoidMethod,
-  JNI::CallNonvirtualVoidMethodV,
-  JNI::CallNonvirtualVoidMethodA,
-  JNI::GetFieldID,
-  JNI::GetObjectField,
-  JNI::GetBooleanField,
-  JNI::GetByteField,
-  JNI::GetCharField,
-  JNI::GetShortField,
-  JNI::GetIntField,
-  JNI::GetLongField,
-  JNI::GetFloatField,
-  JNI::GetDoubleField,
-  JNI::SetObjectField,
-  JNI::SetBooleanField,
-  JNI::SetByteField,
-  JNI::SetCharField,
-  JNI::SetShortField,
-  JNI::SetIntField,
-  JNI::SetLongField,
-  JNI::SetFloatField,
-  JNI::SetDoubleField,
-  JNI::GetStaticMethodID,
-  JNI::CallStaticObjectMethod,
-  JNI::CallStaticObjectMethodV,
-  JNI::CallStaticObjectMethodA,
-  JNI::CallStaticBooleanMethod,
-  JNI::CallStaticBooleanMethodV,
-  JNI::CallStaticBooleanMethodA,
-  JNI::CallStaticByteMethod,
-  JNI::CallStaticByteMethodV,
-  JNI::CallStaticByteMethodA,
-  JNI::CallStaticCharMethod,
-  JNI::CallStaticCharMethodV,
-  JNI::CallStaticCharMethodA,
-  JNI::CallStaticShortMethod,
-  JNI::CallStaticShortMethodV,
-  JNI::CallStaticShortMethodA,
-  JNI::CallStaticIntMethod,
-  JNI::CallStaticIntMethodV,
-  JNI::CallStaticIntMethodA,
-  JNI::CallStaticLongMethod,
-  JNI::CallStaticLongMethodV,
-  JNI::CallStaticLongMethodA,
-  JNI::CallStaticFloatMethod,
-  JNI::CallStaticFloatMethodV,
-  JNI::CallStaticFloatMethodA,
-  JNI::CallStaticDoubleMethod,
-  JNI::CallStaticDoubleMethodV,
-  JNI::CallStaticDoubleMethodA,
-  JNI::CallStaticVoidMethod,
-  JNI::CallStaticVoidMethodV,
-  JNI::CallStaticVoidMethodA,
-  JNI::GetStaticFieldID,
-  JNI::GetStaticObjectField,
-  JNI::GetStaticBooleanField,
-  JNI::GetStaticByteField,
-  JNI::GetStaticCharField,
-  JNI::GetStaticShortField,
-  JNI::GetStaticIntField,
-  JNI::GetStaticLongField,
-  JNI::GetStaticFloatField,
-  JNI::GetStaticDoubleField,
-  JNI::SetStaticObjectField,
-  JNI::SetStaticBooleanField,
-  JNI::SetStaticByteField,
-  JNI::SetStaticCharField,
-  JNI::SetStaticShortField,
-  JNI::SetStaticIntField,
-  JNI::SetStaticLongField,
-  JNI::SetStaticFloatField,
-  JNI::SetStaticDoubleField,
-  JNI::NewString,
-  JNI::GetStringLength,
-  JNI::GetStringChars,
-  JNI::ReleaseStringChars,
-  JNI::NewStringUTF,
-  JNI::GetStringUTFLength,
-  JNI::GetStringUTFChars,
-  JNI::ReleaseStringUTFChars,
-  JNI::GetArrayLength,
-  JNI::NewObjectArray,
-  JNI::GetObjectArrayElement,
-  JNI::SetObjectArrayElement,
-  JNI::NewBooleanArray,
-  JNI::NewByteArray,
-  JNI::NewCharArray,
-  JNI::NewShortArray,
-  JNI::NewIntArray,
-  JNI::NewLongArray,
-  JNI::NewFloatArray,
-  JNI::NewDoubleArray,
-  JNI::GetBooleanArrayElements,
-  JNI::GetByteArrayElements,
-  JNI::GetCharArrayElements,
-  JNI::GetShortArrayElements,
-  JNI::GetIntArrayElements,
-  JNI::GetLongArrayElements,
-  JNI::GetFloatArrayElements,
-  JNI::GetDoubleArrayElements,
-  JNI::ReleaseBooleanArrayElements,
-  JNI::ReleaseByteArrayElements,
-  JNI::ReleaseCharArrayElements,
-  JNI::ReleaseShortArrayElements,
-  JNI::ReleaseIntArrayElements,
-  JNI::ReleaseLongArrayElements,
-  JNI::ReleaseFloatArrayElements,
-  JNI::ReleaseDoubleArrayElements,
-  JNI::GetBooleanArrayRegion,
-  JNI::GetByteArrayRegion,
-  JNI::GetCharArrayRegion,
-  JNI::GetShortArrayRegion,
-  JNI::GetIntArrayRegion,
-  JNI::GetLongArrayRegion,
-  JNI::GetFloatArrayRegion,
-  JNI::GetDoubleArrayRegion,
-  JNI::SetBooleanArrayRegion,
-  JNI::SetByteArrayRegion,
-  JNI::SetCharArrayRegion,
-  JNI::SetShortArrayRegion,
-  JNI::SetIntArrayRegion,
-  JNI::SetLongArrayRegion,
-  JNI::SetFloatArrayRegion,
-  JNI::SetDoubleArrayRegion,
-  JNI::RegisterNatives,
-  JNI::UnregisterNatives,
-  JNI::MonitorEnter,
-  JNI::MonitorExit,
-  JNI::GetJavaVM,
-  JNI::GetStringRegion,
-  JNI::GetStringUTFRegion,
-  JNI::GetPrimitiveArrayCritical,
-  JNI::ReleasePrimitiveArrayCritical,
-  JNI::GetStringCritical,
-  JNI::ReleaseStringCritical,
-  JNI::NewWeakGlobalRef,
-  JNI::DeleteWeakGlobalRef,
-  JNI::ExceptionCheck,
-  JNI::NewDirectByteBuffer,
-  JNI::GetDirectBufferAddress,
-  JNI::GetDirectBufferCapacity,
-  JNI::GetObjectRefType,
+template<bool kEnableIndexIds>
+struct JniNativeInterfaceFunctions {
+  using JNIImpl = JNI<kEnableIndexIds>;
+  static constexpr JNINativeInterface gJniNativeInterface = {
+    nullptr,  // reserved0.
+    nullptr,  // reserved1.
+    nullptr,  // reserved2.
+    nullptr,  // reserved3.
+    JNIImpl::GetVersion,
+    JNIImpl::DefineClass,
+    JNIImpl::FindClass,
+    JNIImpl::FromReflectedMethod,
+    JNIImpl::FromReflectedField,
+    JNIImpl::ToReflectedMethod,
+    JNIImpl::GetSuperclass,
+    JNIImpl::IsAssignableFrom,
+    JNIImpl::ToReflectedField,
+    JNIImpl::Throw,
+    JNIImpl::ThrowNew,
+    JNIImpl::ExceptionOccurred,
+    JNIImpl::ExceptionDescribe,
+    JNIImpl::ExceptionClear,
+    JNIImpl::FatalError,
+    JNIImpl::PushLocalFrame,
+    JNIImpl::PopLocalFrame,
+    JNIImpl::NewGlobalRef,
+    JNIImpl::DeleteGlobalRef,
+    JNIImpl::DeleteLocalRef,
+    JNIImpl::IsSameObject,
+    JNIImpl::NewLocalRef,
+    JNIImpl::EnsureLocalCapacity,
+    JNIImpl::AllocObject,
+    JNIImpl::NewObject,
+    JNIImpl::NewObjectV,
+    JNIImpl::NewObjectA,
+    JNIImpl::GetObjectClass,
+    JNIImpl::IsInstanceOf,
+    JNIImpl::GetMethodID,
+    JNIImpl::CallObjectMethod,
+    JNIImpl::CallObjectMethodV,
+    JNIImpl::CallObjectMethodA,
+    JNIImpl::CallBooleanMethod,
+    JNIImpl::CallBooleanMethodV,
+    JNIImpl::CallBooleanMethodA,
+    JNIImpl::CallByteMethod,
+    JNIImpl::CallByteMethodV,
+    JNIImpl::CallByteMethodA,
+    JNIImpl::CallCharMethod,
+    JNIImpl::CallCharMethodV,
+    JNIImpl::CallCharMethodA,
+    JNIImpl::CallShortMethod,
+    JNIImpl::CallShortMethodV,
+    JNIImpl::CallShortMethodA,
+    JNIImpl::CallIntMethod,
+    JNIImpl::CallIntMethodV,
+    JNIImpl::CallIntMethodA,
+    JNIImpl::CallLongMethod,
+    JNIImpl::CallLongMethodV,
+    JNIImpl::CallLongMethodA,
+    JNIImpl::CallFloatMethod,
+    JNIImpl::CallFloatMethodV,
+    JNIImpl::CallFloatMethodA,
+    JNIImpl::CallDoubleMethod,
+    JNIImpl::CallDoubleMethodV,
+    JNIImpl::CallDoubleMethodA,
+    JNIImpl::CallVoidMethod,
+    JNIImpl::CallVoidMethodV,
+    JNIImpl::CallVoidMethodA,
+    JNIImpl::CallNonvirtualObjectMethod,
+    JNIImpl::CallNonvirtualObjectMethodV,
+    JNIImpl::CallNonvirtualObjectMethodA,
+    JNIImpl::CallNonvirtualBooleanMethod,
+    JNIImpl::CallNonvirtualBooleanMethodV,
+    JNIImpl::CallNonvirtualBooleanMethodA,
+    JNIImpl::CallNonvirtualByteMethod,
+    JNIImpl::CallNonvirtualByteMethodV,
+    JNIImpl::CallNonvirtualByteMethodA,
+    JNIImpl::CallNonvirtualCharMethod,
+    JNIImpl::CallNonvirtualCharMethodV,
+    JNIImpl::CallNonvirtualCharMethodA,
+    JNIImpl::CallNonvirtualShortMethod,
+    JNIImpl::CallNonvirtualShortMethodV,
+    JNIImpl::CallNonvirtualShortMethodA,
+    JNIImpl::CallNonvirtualIntMethod,
+    JNIImpl::CallNonvirtualIntMethodV,
+    JNIImpl::CallNonvirtualIntMethodA,
+    JNIImpl::CallNonvirtualLongMethod,
+    JNIImpl::CallNonvirtualLongMethodV,
+    JNIImpl::CallNonvirtualLongMethodA,
+    JNIImpl::CallNonvirtualFloatMethod,
+    JNIImpl::CallNonvirtualFloatMethodV,
+    JNIImpl::CallNonvirtualFloatMethodA,
+    JNIImpl::CallNonvirtualDoubleMethod,
+    JNIImpl::CallNonvirtualDoubleMethodV,
+    JNIImpl::CallNonvirtualDoubleMethodA,
+    JNIImpl::CallNonvirtualVoidMethod,
+    JNIImpl::CallNonvirtualVoidMethodV,
+    JNIImpl::CallNonvirtualVoidMethodA,
+    JNIImpl::GetFieldID,
+    JNIImpl::GetObjectField,
+    JNIImpl::GetBooleanField,
+    JNIImpl::GetByteField,
+    JNIImpl::GetCharField,
+    JNIImpl::GetShortField,
+    JNIImpl::GetIntField,
+    JNIImpl::GetLongField,
+    JNIImpl::GetFloatField,
+    JNIImpl::GetDoubleField,
+    JNIImpl::SetObjectField,
+    JNIImpl::SetBooleanField,
+    JNIImpl::SetByteField,
+    JNIImpl::SetCharField,
+    JNIImpl::SetShortField,
+    JNIImpl::SetIntField,
+    JNIImpl::SetLongField,
+    JNIImpl::SetFloatField,
+    JNIImpl::SetDoubleField,
+    JNIImpl::GetStaticMethodID,
+    JNIImpl::CallStaticObjectMethod,
+    JNIImpl::CallStaticObjectMethodV,
+    JNIImpl::CallStaticObjectMethodA,
+    JNIImpl::CallStaticBooleanMethod,
+    JNIImpl::CallStaticBooleanMethodV,
+    JNIImpl::CallStaticBooleanMethodA,
+    JNIImpl::CallStaticByteMethod,
+    JNIImpl::CallStaticByteMethodV,
+    JNIImpl::CallStaticByteMethodA,
+    JNIImpl::CallStaticCharMethod,
+    JNIImpl::CallStaticCharMethodV,
+    JNIImpl::CallStaticCharMethodA,
+    JNIImpl::CallStaticShortMethod,
+    JNIImpl::CallStaticShortMethodV,
+    JNIImpl::CallStaticShortMethodA,
+    JNIImpl::CallStaticIntMethod,
+    JNIImpl::CallStaticIntMethodV,
+    JNIImpl::CallStaticIntMethodA,
+    JNIImpl::CallStaticLongMethod,
+    JNIImpl::CallStaticLongMethodV,
+    JNIImpl::CallStaticLongMethodA,
+    JNIImpl::CallStaticFloatMethod,
+    JNIImpl::CallStaticFloatMethodV,
+    JNIImpl::CallStaticFloatMethodA,
+    JNIImpl::CallStaticDoubleMethod,
+    JNIImpl::CallStaticDoubleMethodV,
+    JNIImpl::CallStaticDoubleMethodA,
+    JNIImpl::CallStaticVoidMethod,
+    JNIImpl::CallStaticVoidMethodV,
+    JNIImpl::CallStaticVoidMethodA,
+    JNIImpl::GetStaticFieldID,
+    JNIImpl::GetStaticObjectField,
+    JNIImpl::GetStaticBooleanField,
+    JNIImpl::GetStaticByteField,
+    JNIImpl::GetStaticCharField,
+    JNIImpl::GetStaticShortField,
+    JNIImpl::GetStaticIntField,
+    JNIImpl::GetStaticLongField,
+    JNIImpl::GetStaticFloatField,
+    JNIImpl::GetStaticDoubleField,
+    JNIImpl::SetStaticObjectField,
+    JNIImpl::SetStaticBooleanField,
+    JNIImpl::SetStaticByteField,
+    JNIImpl::SetStaticCharField,
+    JNIImpl::SetStaticShortField,
+    JNIImpl::SetStaticIntField,
+    JNIImpl::SetStaticLongField,
+    JNIImpl::SetStaticFloatField,
+    JNIImpl::SetStaticDoubleField,
+    JNIImpl::NewString,
+    JNIImpl::GetStringLength,
+    JNIImpl::GetStringChars,
+    JNIImpl::ReleaseStringChars,
+    JNIImpl::NewStringUTF,
+    JNIImpl::GetStringUTFLength,
+    JNIImpl::GetStringUTFChars,
+    JNIImpl::ReleaseStringUTFChars,
+    JNIImpl::GetArrayLength,
+    JNIImpl::NewObjectArray,
+    JNIImpl::GetObjectArrayElement,
+    JNIImpl::SetObjectArrayElement,
+    JNIImpl::NewBooleanArray,
+    JNIImpl::NewByteArray,
+    JNIImpl::NewCharArray,
+    JNIImpl::NewShortArray,
+    JNIImpl::NewIntArray,
+    JNIImpl::NewLongArray,
+    JNIImpl::NewFloatArray,
+    JNIImpl::NewDoubleArray,
+    JNIImpl::GetBooleanArrayElements,
+    JNIImpl::GetByteArrayElements,
+    JNIImpl::GetCharArrayElements,
+    JNIImpl::GetShortArrayElements,
+    JNIImpl::GetIntArrayElements,
+    JNIImpl::GetLongArrayElements,
+    JNIImpl::GetFloatArrayElements,
+    JNIImpl::GetDoubleArrayElements,
+    JNIImpl::ReleaseBooleanArrayElements,
+    JNIImpl::ReleaseByteArrayElements,
+    JNIImpl::ReleaseCharArrayElements,
+    JNIImpl::ReleaseShortArrayElements,
+    JNIImpl::ReleaseIntArrayElements,
+    JNIImpl::ReleaseLongArrayElements,
+    JNIImpl::ReleaseFloatArrayElements,
+    JNIImpl::ReleaseDoubleArrayElements,
+    JNIImpl::GetBooleanArrayRegion,
+    JNIImpl::GetByteArrayRegion,
+    JNIImpl::GetCharArrayRegion,
+    JNIImpl::GetShortArrayRegion,
+    JNIImpl::GetIntArrayRegion,
+    JNIImpl::GetLongArrayRegion,
+    JNIImpl::GetFloatArrayRegion,
+    JNIImpl::GetDoubleArrayRegion,
+    JNIImpl::SetBooleanArrayRegion,
+    JNIImpl::SetByteArrayRegion,
+    JNIImpl::SetCharArrayRegion,
+    JNIImpl::SetShortArrayRegion,
+    JNIImpl::SetIntArrayRegion,
+    JNIImpl::SetLongArrayRegion,
+    JNIImpl::SetFloatArrayRegion,
+    JNIImpl::SetDoubleArrayRegion,
+    JNIImpl::RegisterNatives,
+    JNIImpl::UnregisterNatives,
+    JNIImpl::MonitorEnter,
+    JNIImpl::MonitorExit,
+    JNIImpl::GetJavaVM,
+    JNIImpl::GetStringRegion,
+    JNIImpl::GetStringUTFRegion,
+    JNIImpl::GetPrimitiveArrayCritical,
+    JNIImpl::ReleasePrimitiveArrayCritical,
+    JNIImpl::GetStringCritical,
+    JNIImpl::ReleaseStringCritical,
+    JNIImpl::NewWeakGlobalRef,
+    JNIImpl::DeleteWeakGlobalRef,
+    JNIImpl::ExceptionCheck,
+    JNIImpl::NewDirectByteBuffer,
+    JNIImpl::GetDirectBufferAddress,
+    JNIImpl::GetDirectBufferCapacity,
+    JNIImpl::GetObjectRefType,
+  };
 };
 
 const JNINativeInterface* GetJniNativeInterface() {
-  return &gJniNativeInterface;
+  // The template argument is passed down through the Encode/DecodeArtMethod/Field calls so if
+  // JniIdsAreIndices is false the calls will be a simple cast with no branches. This ensures that
+  // the normal case is still fast.
+  return Runtime::Current()->JniIdsAreIndices()
+             ? &JniNativeInterfaceFunctions<true>::gJniNativeInterface
+             : &JniNativeInterfaceFunctions<false>::gJniNativeInterface;
 }
 
 void (*gJniSleepForeverStub[])()  = {
diff --git a/runtime/jni/jni_internal.h b/runtime/jni/jni_internal.h
index 4359074..b6e106c 100644
--- a/runtime/jni/jni_internal.h
+++ b/runtime/jni/jni_internal.h
@@ -20,7 +20,9 @@
 #include <jni.h>
 #include <iosfwd>
 
+#include "base/locks.h"
 #include "base/macros.h"
+#include "runtime.h"
 
 namespace art {
 
@@ -41,24 +43,61 @@
 
 namespace jni {
 
+// We want to maintain a branchless fast-path for performance reasons. The JniIdManager is the
+// ultimate source of truth for how the IDs are handed out, but we inline the normal non-index cases
+// here.
+
+template <bool kEnableIndexIds>
+ALWAYS_INLINE
+static bool IsIndexId(jmethodID mid) {
+  return kEnableIndexIds && ((reinterpret_cast<uintptr_t>(mid) % 2) != 0);
+}
+
+template <bool kEnableIndexIds>
+ALWAYS_INLINE
+static bool IsIndexId(jfieldID fid) {
+  return kEnableIndexIds && ((reinterpret_cast<uintptr_t>(fid) % 2) != 0);
+}
+
+template <bool kEnableIndexIds = true>
 ALWAYS_INLINE
 static inline ArtField* DecodeArtField(jfieldID fid) {
-  return reinterpret_cast<ArtField*>(fid);
+  if (IsIndexId<kEnableIndexIds>(fid)) {
+    return Runtime::Current()->GetJniIdManager()->DecodeFieldId(fid);
+  } else {
+    return reinterpret_cast<ArtField*>(fid);
+  }
 }
 
+template <bool kEnableIndexIds = true>
 ALWAYS_INLINE
-static inline jfieldID EncodeArtField(ArtField* field) {
-  return reinterpret_cast<jfieldID>(field);
+static inline jfieldID EncodeArtField(ArtField* field) REQUIRES_SHARED(Locks::mutator_lock_) {
+  if (kEnableIndexIds && Runtime::Current()->JniIdsAreIndices()) {
+    return Runtime::Current()->GetJniIdManager()->EncodeFieldId(field);
+  } else {
+    return reinterpret_cast<jfieldID>(field);
+  }
 }
 
+template <bool kEnableIndexIds = true>
 ALWAYS_INLINE
-static inline jmethodID EncodeArtMethod(ArtMethod* art_method) {
-  return reinterpret_cast<jmethodID>(art_method);
+static inline jmethodID EncodeArtMethod(ArtMethod* art_method)
+    REQUIRES_SHARED(Locks::mutator_lock_) {
+  if (kEnableIndexIds && Runtime::Current()->JniIdsAreIndices()) {
+    return Runtime::Current()->GetJniIdManager()->EncodeMethodId(art_method);
+  } else {
+    return reinterpret_cast<jmethodID>(art_method);
+  }
 }
 
+template <bool kEnableIndexIds = true>
 ALWAYS_INLINE
 static inline ArtMethod* DecodeArtMethod(jmethodID method_id) {
-  return reinterpret_cast<ArtMethod*>(method_id);
+  if (IsIndexId<kEnableIndexIds>(method_id)) {
+    return Runtime::Current()->GetJniIdManager()->DecodeMethodId(method_id);
+  } else {
+    return reinterpret_cast<ArtMethod*>(method_id);
+  }
 }
 
 }  // namespace jni
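The kEnableIndexIds template parameter exists so that the default pointer-id JNI table keeps
its historical branch-free fast path: GetJniNativeInterface() picks one of the two fully
instantiated tables once, and in the false instantiation IsIndexId() is constant-false, so
every Encode/Decode collapses to a plain reinterpret_cast. A stripped-down illustration of the
same compile-time specialization (generic code, not the ART declarations):

    #include <cstdint>

    // Hypothetical slow path, standing in for JniIdManager::DecodeMethodId().
    static void* LookupIndexId(uintptr_t /*id*/) {
      return nullptr;  // Placeholder body; a real table lookup would go here.
    }

    template <bool kEnableIndexIds>
    static inline void* DecodeIdGeneric(uintptr_t id) {
      // With kEnableIndexIds == false the condition is a compile-time constant,
      // the branch is eliminated, and this function is just a cast, which is
      // exactly the pre-change behavior that the pointer-id table keeps.
      if (kEnableIndexIds && (id & 1) != 0) {
        return LookupIndexId(id);
      }
      return reinterpret_cast<void*>(id);
    }

    // Selected once, mirroring GetJniNativeInterface():
    //   use_index_ids ? &Table<true>::functions : &Table<false>::functions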
diff --git a/runtime/jni_id_type.h b/runtime/jni_id_type.h
new file mode 100644
index 0000000..7802ec6
--- /dev/null
+++ b/runtime/jni_id_type.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_RUNTIME_JNI_ID_TYPE_H_
+#define ART_RUNTIME_JNI_ID_TYPE_H_
+
+#include <iosfwd>
+
+namespace art {
+
+enum class JniIdType {
+  // All Jni method/field IDs are pointers to the corresponding Art{Field,Method} type
+  kPointer,
+
+  // All Jni method/field IDs are indices into a table.
+  kIndices,
+
+  // The current default id type.
+  kDefault = kPointer,
+};
+
+std::ostream& operator<<(std::ostream& os, const JniIdType& rhs);
+
+}  // namespace art
+#endif  // ART_RUNTIME_JNI_ID_TYPE_H_
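The enum above presumably feeds the Runtime::Current()->JniIdsAreIndices() predicate used
throughout the rest of this change; a hypothetical helper showing that mapping (the actual
Runtime plumbing is not part of the hunks shown here):

    #include "jni_id_type.h"

    namespace art {
    // Hypothetical: how a configured JniIdType would map onto the boolean
    // JniIdsAreIndices() checks used elsewhere in this change.
    static bool JniIdsAreIndicesFor(JniIdType type) {
      return type == JniIdType::kIndices;
    }
    }  // namespace art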
diff --git a/runtime/mirror/class.cc b/runtime/mirror/class.cc
index a36fe12..c0a950d 100644
--- a/runtime/mirror/class.cc
+++ b/runtime/mirror/class.cc
@@ -16,14 +16,17 @@
 
 #include "class.h"
 
+#include "android-base/macros.h"
 #include "android-base/stringprintf.h"
 
+#include "array-inl.h"
 #include "art_field-inl.h"
 #include "art_method-inl.h"
+#include "base/enums.h"
 #include "base/logging.h"  // For VLOG.
 #include "base/utils.h"
 #include "class-inl.h"
-#include "class_ext.h"
+#include "class_ext-inl.h"
 #include "class_linker-inl.h"
 #include "class_loader.h"
 #include "class_root.h"
@@ -1547,5 +1550,126 @@
         (new_access_flags & kAccVerificationAttempted) != 0);
 }
 
+ObjPtr<PointerArray> Class::GetMethodIds() {
+  ObjPtr<ClassExt> ext(GetExtData());
+  if (ext.IsNull()) {
+    return nullptr;
+  } else {
+    return ext->GetJMethodIDs();
+  }
+}
+
+ObjPtr<PointerArray> Class::GetOrCreateMethodIds() {
+  DCHECK(Runtime::Current()->JniIdsAreIndices()) << "JNI Ids are pointers!";
+  Thread* self = Thread::Current();
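+  // EnsureExtDataPresent may allocate (and thus trigger GC), so keep 'this' in a Handle across it.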
+  StackHandleScope<1> hs(self);
+  Handle<Class> h_this(hs.NewHandle(this));
+  ObjPtr<ClassExt> ext(EnsureExtDataPresent(h_this, self));
+  if (ext.IsNull()) {
+    self->AssertPendingOOMException();
+    return nullptr;
+  }
+  return ext->EnsureJMethodIDsArrayPresent(NumMethods());
+}
+
+ObjPtr<PointerArray> Class::GetStaticFieldIds() {
+  ObjPtr<ClassExt> ext(GetExtData());
+  if (ext.IsNull()) {
+    return nullptr;
+  } else {
+    return ext->GetStaticJFieldIDs();
+  }
+}
+
+ObjPtr<PointerArray> Class::GetOrCreateStaticFieldIds() {
+  DCHECK(Runtime::Current()->JniIdsAreIndices()) << "JNI Ids are pointers!";
+  Thread* self = Thread::Current();
+  StackHandleScope<1> hs(self);
+  Handle<Class> h_this(hs.NewHandle(this));
+  ObjPtr<ClassExt> ext(EnsureExtDataPresent(h_this, self));
+  if (ext.IsNull()) {
+    self->AssertPendingOOMException();
+    return nullptr;
+  }
+  return ext->EnsureStaticJFieldIDsArrayPresent(NumStaticFields());
+}
+
+ObjPtr<PointerArray> Class::GetInstanceFieldIds() {
+  ObjPtr<ClassExt> ext(GetExtData());
+  if (ext.IsNull()) {
+    return nullptr;
+  } else {
+    return ext->GetInstanceJFieldIDs();
+  }
+}
+
+ObjPtr<PointerArray> Class::GetOrCreateInstanceFieldIds() {
+  DCHECK(Runtime::Current()->JniIdsAreIndices()) << "JNI Ids are pointers!";
+  Thread* self = Thread::Current();
+  StackHandleScope<1> hs(self);
+  Handle<Class> h_this(hs.NewHandle(this));
+  ObjPtr<ClassExt> ext(EnsureExtDataPresent(h_this, self));
+  if (ext.IsNull()) {
+    self->AssertPendingOOMException();
+    return nullptr;
+  }
+  return ext->EnsureInstanceJFieldIDsArrayPresent(NumInstanceFields());
+}
+
+size_t Class::GetStaticFieldIdOffset(ArtField* field) {
+  DCHECK_LT(reinterpret_cast<uintptr_t>(field),
+            reinterpret_cast<uintptr_t>(&*GetSFieldsPtr()->end()))
+      << "field not part of the current class. " << field->PrettyField() << " class is "
+      << PrettyClass();
+  DCHECK_GE(reinterpret_cast<uintptr_t>(field),
+            reinterpret_cast<uintptr_t>(&*GetSFieldsPtr()->begin()))
+      << "field not part of the current class. " << field->PrettyField() << " class is "
+      << PrettyClass();
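+  // Static fields are stored contiguously in sfields_, so the ID index is simple pointer
+  // arithmetic.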
+  uintptr_t start = reinterpret_cast<uintptr_t>(&GetSFieldsPtr()->At(0));
+  uintptr_t fld = reinterpret_cast<uintptr_t>(field);
+  size_t res = (fld - start) / sizeof(ArtField);
+  DCHECK_EQ(&GetSFieldsPtr()->At(res), field)
+      << "Incorrect field computation expected: " << field->PrettyField()
+      << " got: " << GetSFieldsPtr()->At(res).PrettyField();
+  return res;
+}
+
+size_t Class::GetInstanceFieldIdOffset(ArtField* field) {
+  DCHECK_LT(reinterpret_cast<uintptr_t>(field),
+            reinterpret_cast<uintptr_t>(&*GetIFieldsPtr()->end()))
+      << "field not part of the current class. " << field->PrettyField() << " class is "
+      << PrettyClass();
+  DCHECK_GE(reinterpret_cast<uintptr_t>(field),
+            reinterpret_cast<uintptr_t>(&*GetIFieldsPtr()->begin()))
+      << "field not part of the current class. " << field->PrettyField() << " class is "
+      << PrettyClass();
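+  // Same computation as GetStaticFieldIdOffset, but over the ifields_ array.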
+  uintptr_t start = reinterpret_cast<uintptr_t>(&GetIFieldsPtr()->At(0));
+  uintptr_t fld = reinterpret_cast<uintptr_t>(field);
+  size_t res = (fld - start) / sizeof(ArtField);
+  DCHECK_EQ(&GetIFieldsPtr()->At(res), field)
+      << "Incorrect field computation expected: " << field->PrettyField()
+      << " got: " << GetIFieldsPtr()->At(res).PrettyField();
+  return res;
+}
+
+size_t Class::GetMethodIdOffset(ArtMethod* method, PointerSize pointer_size) {
+  DCHECK(GetMethodsSlice(kRuntimePointerSize).Contains(method))
+      << "method not part of the current class. " << method->PrettyMethod() << "( " << reinterpret_cast<void*>(method) << ")" << " class is "
+      << PrettyClass() << [&]() REQUIRES_SHARED(Locks::mutator_lock_) {
+        std::ostringstream os;
+        os << " Methods are [";
+        for (ArtMethod& m : GetMethodsSlice(kRuntimePointerSize)) {
+          os << m.PrettyMethod() << " (" << reinterpret_cast<void*>(&m) << "), ";
+        }
+        os << "]";
+        return os.str();
+      }();
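+  // Methods are stored contiguously in methods_; divide the byte offset by the
+  // pointer-size-dependent ArtMethod size to recover the index.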
+  uintptr_t start = reinterpret_cast<uintptr_t>(&*GetMethodsSlice(pointer_size).begin());
+  uintptr_t fld = reinterpret_cast<uintptr_t>(method);
+  size_t art_method_size = ArtMethod::Size(pointer_size);
+  size_t art_method_align = ArtMethod::Alignment(pointer_size);
+  size_t res = (fld - start) / art_method_size;
+  DCHECK_EQ(&GetMethodsPtr()->At(res, art_method_size, art_method_align), method)
+      << "Incorrect method computation expected: " << method->PrettyMethod()
+      << " got: " << GetMethodsPtr()->At(res, art_method_size, art_method_align).PrettyMethod();
+  return res;
+}
+
 }  // namespace mirror
 }  // namespace art
diff --git a/runtime/mirror/class.h b/runtime/mirror/class.h
index 09d5532..144350f 100644
--- a/runtime/mirror/class.h
+++ b/runtime/mirror/class.h
@@ -1229,6 +1229,23 @@
   void FixupNativePointers(Class* dest, PointerSize pointer_size, const Visitor& visitor)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
+  // Get or create the various JNI ID arrays in a lock-free, thread-safe manner.
+  ObjPtr<PointerArray> GetOrCreateMethodIds() REQUIRES_SHARED(Locks::mutator_lock_);
+  ObjPtr<PointerArray> GetMethodIds() REQUIRES_SHARED(Locks::mutator_lock_);
+  ObjPtr<PointerArray> GetOrCreateStaticFieldIds() REQUIRES_SHARED(Locks::mutator_lock_);
+  ObjPtr<PointerArray> GetStaticFieldIds() REQUIRES_SHARED(Locks::mutator_lock_);
+  ObjPtr<PointerArray> GetOrCreateInstanceFieldIds() REQUIRES_SHARED(Locks::mutator_lock_);
+  ObjPtr<PointerArray> GetInstanceFieldIds() REQUIRES_SHARED(Locks::mutator_lock_);
+
+  // Calculate the index in the ifields_, sfields_, or methods_ array at which the given field or
+  // method is located. This is to be used with the Get{,OrCreate}...Ids functions above.
+  size_t GetStaticFieldIdOffset(ArtField* field)
+      REQUIRES_SHARED(Locks::mutator_lock_);
+  size_t GetInstanceFieldIdOffset(ArtField* field)
+      REQUIRES_SHARED(Locks::mutator_lock_);
+  size_t GetMethodIdOffset(ArtMethod* method, PointerSize pointer_size)
+      REQUIRES_SHARED(Locks::mutator_lock_);
+
  private:
   template <typename T, VerifyObjectFlags kVerifyFlags, typename Visitor>
   void FixupNativePointer(
diff --git a/runtime/mirror/class_ext-inl.h b/runtime/mirror/class_ext-inl.h
index bf51654..ead02ee 100644
--- a/runtime/mirror/class_ext-inl.h
+++ b/runtime/mirror/class_ext-inl.h
@@ -19,12 +19,81 @@
 
 #include "class_ext.h"
 
+#include "array-inl.h"
 #include "art_method-inl.h"
+#include "handle_scope.h"
 #include "object-inl.h"
 
 namespace art {
 namespace mirror {
 
+template <VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
+inline ObjPtr<PointerArray> ClassExt::EnsureJniIdsArrayPresent(MemberOffset off, size_t count) {
+  ObjPtr<PointerArray> existing(
+      GetFieldObject<PointerArray, kVerifyFlags, kReadBarrierOption>(off));
+  if (!existing.IsNull()) {
+    return existing;
+  }
+  Thread* self = Thread::Current();
+  StackHandleScope<2> hs(self);
+  Handle<ClassExt> h_this(hs.NewHandle(this));
+  Handle<PointerArray> new_arr(
+      hs.NewHandle(Runtime::Current()->GetClassLinker()->AllocPointerArray(self, count)));
+  if (new_arr.IsNull()) {
+    // The allocation failed; an OOM exception should already be pending.
+    self->AssertPendingOOMException();
+    return nullptr;
+  }
+  bool set;
+  // Install the newly allocated array at the given offset using CAS semantics.
+  if (Runtime::Current()->IsActiveTransaction()) {
+    set = h_this->CasFieldObject<true>(
+        off, nullptr, new_arr.Get(), CASMode::kStrong, std::memory_order_seq_cst);
+  } else {
+    set = h_this->CasFieldObject<false>(
+        off, nullptr, new_arr.Get(), CASMode::kStrong, std::memory_order_seq_cst);
+  }
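+  // If the CAS failed, another thread installed an array concurrently; return that one instead.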
+  ObjPtr<PointerArray> ret(
+      set ? new_arr.Get()
+          : h_this->GetFieldObject<PointerArray, kVerifyFlags, kReadBarrierOption>(off));
+  CHECK(!ret.IsNull());
+  return ret;
+}
+
+template <VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
+inline ObjPtr<PointerArray> ClassExt::EnsureJMethodIDsArrayPresent(size_t count) {
+  return EnsureJniIdsArrayPresent<kVerifyFlags, kReadBarrierOption>(
+      MemberOffset(OFFSET_OF_OBJECT_MEMBER(ClassExt, jmethod_ids_)), count);
+}
+template <VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
+inline ObjPtr<PointerArray> ClassExt::EnsureStaticJFieldIDsArrayPresent(size_t count) {
+  return EnsureJniIdsArrayPresent<kVerifyFlags, kReadBarrierOption>(
+      MemberOffset(OFFSET_OF_OBJECT_MEMBER(ClassExt, static_jfield_ids_)), count);
+}
+template <VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
+inline ObjPtr<PointerArray> ClassExt::EnsureInstanceJFieldIDsArrayPresent(size_t count) {
+  return EnsureJniIdsArrayPresent<kVerifyFlags, kReadBarrierOption>(
+      MemberOffset(OFFSET_OF_OBJECT_MEMBER(ClassExt, instance_jfield_ids_)), count);
+}
+
+template <VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
+inline ObjPtr<PointerArray> ClassExt::GetInstanceJFieldIDs() {
+  return GetFieldObject<PointerArray, kVerifyFlags, kReadBarrierOption>(
+      OFFSET_OF_OBJECT_MEMBER(ClassExt, instance_jfield_ids_));
+}
+
+template <VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
+inline ObjPtr<PointerArray> ClassExt::GetStaticJFieldIDs() {
+  return GetFieldObject<PointerArray, kVerifyFlags, kReadBarrierOption>(
+      OFFSET_OF_OBJECT_MEMBER(ClassExt, static_jfield_ids_));
+}
+
+template <VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
+inline ObjPtr<PointerArray> ClassExt::GetJMethodIDs() {
+  return GetFieldObject<PointerArray, kVerifyFlags, kReadBarrierOption>(
+      OFFSET_OF_OBJECT_MEMBER(ClassExt, jmethod_ids_));
+}
+
 inline ObjPtr<Object> ClassExt::GetVerifyError() {
   return GetFieldObject<ClassExt>(OFFSET_OF_OBJECT_MEMBER(ClassExt, verify_error_));
 }
diff --git a/runtime/mirror/class_ext.h b/runtime/mirror/class_ext.h
index 70bea33..6fb225f 100644
--- a/runtime/mirror/class_ext.h
+++ b/runtime/mirror/class_ext.h
@@ -48,6 +48,33 @@
 
   template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
            ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
+  ObjPtr<PointerArray> EnsureInstanceJFieldIDsArrayPresent(size_t count)
+      REQUIRES_SHARED(Locks::mutator_lock_);
+
+  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
+  ObjPtr<PointerArray> GetInstanceJFieldIDs() REQUIRES_SHARED(Locks::mutator_lock_);
+
+  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
+  ObjPtr<PointerArray> EnsureStaticJFieldIDsArrayPresent(size_t count)
+      REQUIRES_SHARED(Locks::mutator_lock_);
+
+  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
+  ObjPtr<PointerArray> GetStaticJFieldIDs() REQUIRES_SHARED(Locks::mutator_lock_);
+
+  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
+  ObjPtr<PointerArray> EnsureJMethodIDsArrayPresent(size_t count)
+      REQUIRES_SHARED(Locks::mutator_lock_);
+
+  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
+  ObjPtr<PointerArray> GetJMethodIDs() REQUIRES_SHARED(Locks::mutator_lock_);
+
+  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
   ObjPtr<PointerArray> GetObsoleteMethods() REQUIRES_SHARED(Locks::mutator_lock_);
 
   ObjPtr<Object> GetOriginalDexFile() REQUIRES_SHARED(Locks::mutator_lock_);
@@ -82,19 +109,36 @@
   static ObjPtr<ClassExt> Alloc(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);
 
  private:
+  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
+  ObjPtr<PointerArray> EnsureJniIdsArrayPresent(MemberOffset off, size_t count)
+      REQUIRES_SHARED(Locks::mutator_lock_);
+
   // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
+  // An array containing the jfieldIDs assigned to each field in the corresponding position in the
+  // class's ifields_ array, or '0' if no ID has been assigned to that field yet.
+  HeapReference<PointerArray> instance_jfield_ids_;
+
+  // An array containing the jmethodIDs assigned to each method in the corresponding position in
+  // the class's methods_ array, or '0' if no ID has been assigned to that method yet.
+  HeapReference<PointerArray> jmethod_ids_;
+
   HeapReference<ObjectArray<DexCache>> obsolete_dex_caches_;
 
   HeapReference<PointerArray> obsolete_methods_;
 
   HeapReference<Object> original_dex_file_;
 
+  // An array containing the jfieldIDs assigned to each field in the corresponding position in the
+  // class's sfields_ array, or '0' if no ID has been assigned to that field yet.
+  HeapReference<PointerArray> static_jfield_ids_;
+
   // The saved verification error of this class.
   HeapReference<Object> verify_error_;
 
   // Native pointer to DexFile and ClassDef index of this class before it was JVMTI-redefined.
-  int64_t pre_redefine_dex_file_ptr_;
   int32_t pre_redefine_class_def_index_;
+  int64_t pre_redefine_dex_file_ptr_;
 
   friend struct art::ClassExtOffsets;  // for verifying offset information
   DISALLOW_IMPLICIT_CONSTRUCTORS(ClassExt);
diff --git a/runtime/parsed_options.cc b/runtime/parsed_options.cc
index a5e9d95..afcf40d 100644
--- a/runtime/parsed_options.cc
+++ b/runtime/parsed_options.cc
@@ -367,6 +367,10 @@
           .WithType<bool>()
           .WithValueMap({{"false", false}, {"true", true}})
           .IntoKey(M::FastClassNotFoundException)
+      .Define("-Xopaque-jni-ids:_")
+          .WithType<bool>()
+          .WithValueMap({{"true", true}, {"false", false}})
+          .IntoKey(M::OpaqueJniIds)
       .Ignore({
           "-ea", "-da", "-enableassertions", "-disableassertions", "--runtime-arg", "-esa",
           "-dsa", "-enablesystemassertions", "-disablesystemassertions", "-Xrs", "-Xint:_",
@@ -784,6 +788,7 @@
                        "(Enable new and experimental agent support)\n");
   UsageMessage(stream, "  -Xexperimental:agents"
                        "(Enable new and experimental agent support)\n");
+  UsageMessage(stream, "  -Xopaque-jni-ids:{true,false} (Use opaque integers for JNI IDs)\n");
   UsageMessage(stream, "\n");
 
   UsageMessage(stream, "The following previously supported Dalvik options are ignored:\n");
diff --git a/runtime/runtime.cc b/runtime/runtime.cc
index f0d0538..eb696fb 100644
--- a/runtime/runtime.cc
+++ b/runtime/runtime.cc
@@ -94,7 +94,9 @@
 #include "jit/jit_code_cache.h"
 #include "jit/profile_saver.h"
 #include "jni/java_vm_ext.h"
+#include "jni/jni_id_manager.h"
 #include "jni/jni_internal.h"
+#include "jni_id_type.h"
 #include "linear_alloc.h"
 #include "memory_representation.h"
 #include "mirror/array.h"
@@ -1181,6 +1183,8 @@
 
   oat_file_manager_ = new OatFileManager;
 
+  jni_id_manager_.reset(new jni::JniIdManager);
+
   Thread::SetSensitiveThreadHook(runtime_options.GetOrDefault(Opt::HookIsSensitiveThread));
   Monitor::Init(runtime_options.GetOrDefault(Opt::LockProfThreshold),
                 runtime_options.GetOrDefault(Opt::StackDumpLockProfThreshold));
@@ -1302,6 +1306,13 @@
   is_low_memory_mode_ = runtime_options.Exists(Opt::LowMemoryMode);
   madvise_random_access_ = runtime_options.GetOrDefault(Opt::MadviseRandomAccess);
 
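+  // Pick index-based or pointer-based JNI IDs from -Xopaque-jni-ids; fall back to the default.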
+  if (!runtime_options.Exists(Opt::OpaqueJniIds)) {
+    jni_ids_indirection_ = JniIdType::kDefault;
+  } else {
+    jni_ids_indirection_ = *runtime_options.Get(Opt::OpaqueJniIds) ? JniIdType::kIndices
+                                                                   : JniIdType::kPointer;
+  }
+
   plugins_ = runtime_options.ReleaseOrDefault(Opt::Plugins);
   agent_specs_ = runtime_options.ReleaseOrDefault(Opt::AgentPath);
   // TODO Add back in -agentlib
diff --git a/runtime/runtime.h b/runtime/runtime.h
index 6e27a9f..53e669c 100644
--- a/runtime/runtime.h
+++ b/runtime/runtime.h
@@ -37,6 +37,8 @@
 #include "gc_root.h"
 #include "instrumentation.h"
 #include "jdwp_provider.h"
+#include "jni/jni_id_manager.h"
+#include "jni_id_type.h"
 #include "obj_ptr.h"
 #include "offsets.h"
 #include "process_state.h"
@@ -279,6 +281,10 @@
     return class_linker_;
   }
 
+  jni::JniIdManager* GetJniIdManager() const {
+    return jni_id_manager_.get();
+  }
+
   size_t GetDefaultStackSize() const {
     return default_stack_size_;
   }
@@ -834,6 +840,10 @@
     return jdwp_provider_;
   }
 
+  bool JniIdsAreIndices() const {
+    return jni_ids_indirection_ != JniIdType::kPointer;
+  }
+
   uint32_t GetVerifierLoggingThresholdMs() const {
     return verifier_logging_threshold_ms_;
   }
@@ -1005,6 +1015,8 @@
 
   SignalCatcher* signal_catcher_;
 
+  std::unique_ptr<jni::JniIdManager> jni_id_manager_;
+
   std::unique_ptr<JavaVMExt> java_vm_;
 
   std::unique_ptr<jit::Jit> jit_;
@@ -1188,6 +1200,10 @@
   // The jdwp provider we were configured with.
   JdwpProvider jdwp_provider_;
 
+  // Whether jmethodIDs and jfieldIDs are opaque indices (JniIdType::kIndices) or raw pointers
+  // (JniIdType::kPointer, the default). This is set by -Xopaque-jni-ids:{true,false}.
+  JniIdType jni_ids_indirection_;
+
   // Saved environment.
   class EnvSnapshot {
    public:
diff --git a/runtime/runtime_options.def b/runtime/runtime_options.def
index 43a7229..d2594b2 100644
--- a/runtime/runtime_options.def
+++ b/runtime/runtime_options.def
@@ -77,6 +77,7 @@
 RUNTIME_OPTIONS_KEY (bool,                UseJitCompilation,              true)
 RUNTIME_OPTIONS_KEY (bool,                DumpNativeStackOnSigQuit,       true)
 RUNTIME_OPTIONS_KEY (bool,                MadviseRandomAccess,            false)
+RUNTIME_OPTIONS_KEY (bool,                OpaqueJniIds,                   false)  // -Xopaque-jni-ids:{true,false}
 RUNTIME_OPTIONS_KEY (unsigned int,        JITCompileThreshold)
 RUNTIME_OPTIONS_KEY (unsigned int,        JITWarmupThreshold)
 RUNTIME_OPTIONS_KEY (unsigned int,        JITOsrThreshold)
diff --git a/runtime/thread.cc b/runtime/thread.cc
index da76e96..6ff4c71 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -4280,8 +4280,16 @@
   self_->ClearException();
 }
 
+void ScopedExceptionStorage::SuppressOldException(const char* message) {
+  CHECK(self_->IsExceptionPending()) << *self_;
+  ObjPtr<mirror::Throwable> old_suppressed(excp_.Get());
+  excp_.Assign(self_->GetException());
+  LOG(WARNING) << message << "Suppressing old exception: " << old_suppressed->Dump();
+  self_->ClearException();
+}
+
 ScopedExceptionStorage::~ScopedExceptionStorage() {
-  CHECK(!self_->IsExceptionPending()) << self_;
+  CHECK(!self_->IsExceptionPending()) << *self_;
   if (!excp_.IsNull()) {
     self_->SetException(excp_.Get());
   }
diff --git a/runtime/thread.h b/runtime/thread.h
index 8fe9466..9a230e2 100644
--- a/runtime/thread.h
+++ b/runtime/thread.h
@@ -1901,12 +1901,13 @@
 class ScopedExceptionStorage {
  public:
   explicit ScopedExceptionStorage(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);
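+  // Replace the stored exception with the currently pending one, logging the previously stored
+  // exception as suppressed.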
+  void SuppressOldException(const char* message = "") REQUIRES_SHARED(Locks::mutator_lock_);
   ~ScopedExceptionStorage() REQUIRES_SHARED(Locks::mutator_lock_);
 
  private:
   Thread* self_;
   StackHandleScope<1> hs_;
-  Handle<mirror::Throwable> excp_;
+  MutableHandle<mirror::Throwable> excp_;
 };
 
 std::ostream& operator<<(std::ostream& os, const Thread& thread);
diff --git a/test/testrunner/testrunner.py b/test/testrunner/testrunner.py
index ac1b2c6..379b2c6 100755
--- a/test/testrunner/testrunner.py
+++ b/test/testrunner/testrunner.py
@@ -435,7 +435,7 @@
         options_test += ' --no-image'
 
       if debuggable == 'debuggable':
-        options_test += ' --debuggable'
+        options_test += ' --debuggable --runtime-option -Xopaque-jni-ids:true'
 
       if jvmti == 'jvmti-stress':
         options_test += ' --jvmti-trace-stress --jvmti-redefine-stress --jvmti-field-stress'
@@ -797,7 +797,7 @@
   It supports two types of test_name:
   1) Like 001-HelloWorld. In this case, it will just verify if the test actually
   exists and if it does, it returns the testname.
-  2) Like test-art-host-run-test-debug-prebuild-interpreter-no-relocate-ntrace-cms-checkjni-picimage-ndebuggable-001-HelloWorld32
+  2) Like test-art-host-run-test-debug-prebuild-interpreter-no-relocate-ntrace-cms-checkjni-pointer-ids-picimage-ndebuggable-001-HelloWorld32
   In this case, it will parse all the variants and check if they are placed
   correctly. If yes, it will set the various VARIANT_TYPES to use the
   variants required to run the test. Again, it returns the test_name