Merge changes I5b05e23a,I9350b250,I1d66b42a

* changes:
  More refactor for JVMTI redefinition run tests (3/3)
  More refactor for JVMTI redefinition run tests (2/3)
  More refactor for JVMTI redefinition run tests (1/3)
diff --git a/compiler/optimizing/intrinsics_mips.cc b/compiler/optimizing/intrinsics_mips.cc
index 4297979..41df56b 100644
--- a/compiler/optimizing/intrinsics_mips.cc
+++ b/compiler/optimizing/intrinsics_mips.cc
@@ -2940,6 +2940,199 @@
   GenFPToFPCall(invoke, codegen_, kQuickTanh);
 }
 
+// static void java.lang.System.arraycopy(Object src, int srcPos,
+//                                        Object dest, int destPos,
+//                                        int length)
+void IntrinsicLocationsBuilderMIPS::VisitSystemArrayCopyChar(HInvoke* invoke) {
+  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
+  HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
+  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();
+
+  // As long as we are checking, we might as well check to see if the src and dest
+  // positions are >= 0.
+  if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
+      (dest_pos != nullptr && dest_pos->GetValue() < 0)) {
+    // We will have to fail anyway.
+    return;
+  }
+
+  // And since we are already checking, check the length too.
+  if (length != nullptr) {
+    int32_t len = length->GetValue();
+    if (len < 0) {
+      // Just call as normal.
+      return;
+    }
+  }
+
+  // Okay, it is safe to generate inline code.
+  LocationSummary* locations =
+      new (arena_) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
+  // arraycopy(Object src, int srcPos, Object dest, int destPos, int length).
+  locations->SetInAt(0, Location::RequiresRegister());
+  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
+  locations->SetInAt(2, Location::RequiresRegister());
+  locations->SetInAt(3, Location::RegisterOrConstant(invoke->InputAt(3)));
+  locations->SetInAt(4, Location::RegisterOrConstant(invoke->InputAt(4)));
+
+  locations->AddTemp(Location::RequiresRegister());
+  locations->AddTemp(Location::RequiresRegister());
+  locations->AddTemp(Location::RequiresRegister());
+}
+
+// Utility routine to verify that "length(input) - pos >= length"
+static void EnoughItems(MipsAssembler* assembler,
+                        Register length_input_minus_pos,
+                        Location length,
+                        SlowPathCodeMIPS* slow_path) {
+  if (length.IsConstant()) {
+    int32_t length_constant = length.GetConstant()->AsIntConstant()->GetValue();
+
+    if (IsInt<16>(length_constant)) {
+      __ Slti(TMP, length_input_minus_pos, length_constant);
+      __ Bnez(TMP, slow_path->GetEntryLabel());
+    } else {
+      __ LoadConst32(TMP, length_constant);
+      __ Blt(length_input_minus_pos, TMP, slow_path->GetEntryLabel());
+    }
+  } else {
+    __ Blt(length_input_minus_pos, length.AsRegister<Register>(), slow_path->GetEntryLabel());
+  }
+}
+
+static void CheckPosition(MipsAssembler* assembler,
+                          Location pos,
+                          Register input,
+                          Location length,
+                          SlowPathCodeMIPS* slow_path,
+                          bool length_is_input_length = false) {
+  // Where is the length in the Array?
+  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();
+
+  // Calculate length(input) - pos.
+  if (pos.IsConstant()) {
+    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
+    if (pos_const == 0) {
+      if (!length_is_input_length) {
+        // Check that length(input) >= length.
+        __ LoadFromOffset(kLoadWord, AT, input, length_offset);
+        EnoughItems(assembler, AT, length, slow_path);
+      }
+    } else {
+      // Check that (length(input) - pos) >= zero.
+      __ LoadFromOffset(kLoadWord, AT, input, length_offset);
+      DCHECK_GT(pos_const, 0);
+      __ Addiu32(AT, AT, -pos_const, TMP);
+      __ Bltz(AT, slow_path->GetEntryLabel());
+
+      // Verify that (length(input) - pos) >= length.
+      EnoughItems(assembler, AT, length, slow_path);
+    }
+  } else if (length_is_input_length) {
+    // The only way the copy can succeed is if pos is zero.
+    Register pos_reg = pos.AsRegister<Register>();
+    __ Bnez(pos_reg, slow_path->GetEntryLabel());
+  } else {
+    // Verify that pos >= 0.
+    Register pos_reg = pos.AsRegister<Register>();
+    __ Bltz(pos_reg, slow_path->GetEntryLabel());
+
+    // Check that (length(input) - pos) >= zero.
+    __ LoadFromOffset(kLoadWord, AT, input, length_offset);
+    __ Subu(AT, AT, pos_reg);
+    __ Bltz(AT, slow_path->GetEntryLabel());
+
+    // Verify that (length(input) - pos) >= length.
+    EnoughItems(assembler, AT, length, slow_path);
+  }
+}
+
+void IntrinsicCodeGeneratorMIPS::VisitSystemArrayCopyChar(HInvoke* invoke) {
+  MipsAssembler* assembler = GetAssembler();
+  LocationSummary* locations = invoke->GetLocations();
+
+  Register src = locations->InAt(0).AsRegister<Register>();
+  Location src_pos = locations->InAt(1);
+  Register dest = locations->InAt(2).AsRegister<Register>();
+  Location dest_pos = locations->InAt(3);
+  Location length = locations->InAt(4);
+
+  MipsLabel loop;
+
+  Register dest_base = locations->GetTemp(0).AsRegister<Register>();
+  Register src_base = locations->GetTemp(1).AsRegister<Register>();
+  Register count = locations->GetTemp(2).AsRegister<Register>();
+
+  SlowPathCodeMIPS* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS(invoke);
+  codegen_->AddSlowPath(slow_path);
+
+  // Bail out if the source and destination are the same (to handle overlap).
+  __ Beq(src, dest, slow_path->GetEntryLabel());
+
+  // Bail out if the source is null.
+  __ Beqz(src, slow_path->GetEntryLabel());
+
+  // Bail out if the destination is null.
+  __ Beqz(dest, slow_path->GetEntryLabel());
+
+  // Load length into register for count.
+  if (length.IsConstant()) {
+    __ LoadConst32(count, length.GetConstant()->AsIntConstant()->GetValue());
+  } else {
+    // If the length is negative, bail out.
+    // We have already checked in the LocationsBuilder for the constant case.
+    __ Bltz(length.AsRegister<Register>(), slow_path->GetEntryLabel());
+
+    __ Move(count, length.AsRegister<Register>());
+  }
+
+  // Validity checks: source.
+  CheckPosition(assembler, src_pos, src, Location::RegisterLocation(count), slow_path);
+
+  // Validity checks: dest.
+  CheckPosition(assembler, dest_pos, dest, Location::RegisterLocation(count), slow_path);
+
+  // If count is zero, we're done.
+  __ Beqz(count, slow_path->GetExitLabel());
+
+  // Okay, everything checks out.  Finally time to do the copy.
+  // Check assumption that sizeof(Char) is 2 (used in scaling below).
+  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
+  DCHECK_EQ(char_size, 2u);
+
+  const size_t char_shift = Primitive::ComponentSizeShift(Primitive::kPrimChar);
+
+  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();
+
+  // Calculate source and destination addresses.
+  if (src_pos.IsConstant()) {
+    int32_t src_pos_const = src_pos.GetConstant()->AsIntConstant()->GetValue();
+
+    __ Addiu32(src_base, src, data_offset + char_size * src_pos_const, TMP);
+  } else {
+    __ Addiu32(src_base, src, data_offset, TMP);
+    __ ShiftAndAdd(src_base, src_pos.AsRegister<Register>(), src_base, char_shift);
+  }
+  if (dest_pos.IsConstant()) {
+    int32_t dest_pos_const = dest_pos.GetConstant()->AsIntConstant()->GetValue();
+
+    __ Addiu32(dest_base, dest, data_offset + char_size * dest_pos_const, TMP);
+  } else {
+    __ Addiu32(dest_base, dest, data_offset, TMP);
+    __ ShiftAndAdd(dest_base, dest_pos.AsRegister<Register>(), dest_base, char_shift);
+  }
+
+  __ Bind(&loop);
+  __ Lh(TMP, src_base, 0);
+  __ Addiu(src_base, src_base, char_size);
+  __ Addiu(count, count, -1);
+  __ Sh(TMP, dest_base, 0);
+  __ Addiu(dest_base, dest_base, char_size);
+  __ Bnez(count, &loop);
+
+  __ Bind(slow_path->GetExitLabel());
+}
+
 // Unimplemented intrinsics.
 
 UNIMPLEMENTED_INTRINSIC(MIPS, MathCeil)
@@ -2951,7 +3144,6 @@
 UNIMPLEMENTED_INTRINSIC(MIPS, UnsafeCASLong)
 
 UNIMPLEMENTED_INTRINSIC(MIPS, ReferenceGetReferent)
-UNIMPLEMENTED_INTRINSIC(MIPS, SystemArrayCopyChar)
 UNIMPLEMENTED_INTRINSIC(MIPS, SystemArrayCopy)
 
 UNIMPLEMENTED_INTRINSIC(MIPS, StringStringIndexOf);
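
For readers skimming the intrinsic above: it inlines the bounds checks that java.lang.System.arraycopy performs for char arrays before doing a halfword-by-halfword copy, and it branches to the slow path (the regular runtime call, which throws the appropriate exception) whenever a check fails. Below is a minimal sketch of those preconditions in plain C++, not ART code; the helper name and the std::vector-based signature are illustrative only.

#include <cstdint>
#include <vector>

// Preconditions the inlined fast path establishes before copying 'length' chars
// from src[src_pos..] to dest[dest_pos..]; any failure means "take the slow path".
bool FastPathPreconditionsHold(const std::vector<uint16_t>& src, int32_t src_pos,
                               const std::vector<uint16_t>& dest, int32_t dest_pos,
                               int32_t length) {
  if (&src == &dest) {
    return false;  // Same array: possible overlap, bail to the slow path.
  }
  if (src_pos < 0 || dest_pos < 0 || length < 0) {
    return false;  // Negative positions or length always fail.
  }
  // "length(input) - pos >= length" for both the source and the destination.
  if (static_cast<int64_t>(src.size()) - src_pos < length) {
    return false;
  }
  if (static_cast<int64_t>(dest.size()) - dest_pos < length) {
    return false;
  }
  return true;
}

Null source/destination are handled by the Beqz branches in the generated code rather than here, and a zero length branches straight to the slow path's exit label, i.e. copies nothing.
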
diff --git a/dex2oat/dex2oat.cc b/dex2oat/dex2oat.cc
index b4ea20b..81566c4 100644
--- a/dex2oat/dex2oat.cc
+++ b/dex2oat/dex2oat.cc
@@ -2931,7 +2931,7 @@
   // time (bug 10645725) unless we're a debug build or running on valgrind. Note: The Dex2Oat class
   // should not destruct the runtime in this case.
   if (!art::kIsDebugBuild && (RUNNING_ON_MEMORY_TOOL == 0)) {
-    exit(result);
+    _exit(result);
   }
   return result;
 }
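
One note on the exit() -> _exit() change above: _exit() terminates the process immediately, without running atexit handlers or destructors of static-duration objects, which is exactly the expensive orderly teardown the surrounding comment wants to skip on non-debug, non-valgrind runs. A tiny standalone C++ illustration of the difference (all names here are made up for the example):

#include <cstdio>
#include <cstdlib>
#include <unistd.h>

// Stand-in for state with an expensive destructor (e.g. a whole runtime).
struct Expensive {
  ~Expensive() { std::puts("slow orderly teardown"); }
};
Expensive g_expensive;

int main() {
  std::atexit([] { std::puts("atexit handler"); });
  // exit(0) would run the atexit handler and g_expensive's destructor first.
  _exit(0);  // Terminates immediately: no handlers, no static destructors.
}
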
diff --git a/dexlayout/dexlayout.cc b/dexlayout/dexlayout.cc
index 0536f322..0e0d66b 100644
--- a/dexlayout/dexlayout.cc
+++ b/dexlayout/dexlayout.cc
@@ -1816,6 +1816,10 @@
       output_location += "/" + dex_file_location + ".new";
     }
     new_file.reset(OS::CreateEmptyFile(output_location.c_str()));
+    if (new_file == nullptr) {
+      LOG(ERROR) << "Could not create dex writer output file: " << output_location;
+      return;
+    }
     ftruncate(new_file->Fd(), header_->FileSize());
     mem_map_.reset(MemMap::MapFile(header_->FileSize(), PROT_READ | PROT_WRITE, MAP_SHARED,
         new_file->Fd(), 0, /*low_4gb*/ false, output_location.c_str(), &error_msg));
@@ -1825,7 +1829,7 @@
   }
   if (mem_map_ == nullptr) {
     LOG(ERROR) << "Could not create mem map for dex writer output: " << error_msg;
-    if (new_file.get() != nullptr) {
+    if (new_file != nullptr) {
       new_file->Erase();
     }
     return;
diff --git a/runtime/dex_file-inl.h b/runtime/dex_file-inl.h
index e884e39..41db4d8 100644
--- a/runtime/dex_file-inl.h
+++ b/runtime/dex_file-inl.h
@@ -59,11 +59,17 @@
 }
 
 inline const char* DexFile::StringByTypeIdx(dex::TypeIndex idx, uint32_t* unicode_length) const {
+  if (!idx.IsValid()) {
+    return nullptr;
+  }
   const TypeId& type_id = GetTypeId(idx);
   return StringDataAndUtf16LengthByIdx(type_id.descriptor_idx_, unicode_length);
 }
 
 inline const char* DexFile::StringByTypeIdx(dex::TypeIndex idx) const {
+  if (!idx.IsValid()) {
+    return nullptr;
+  }
   const TypeId& type_id = GetTypeId(idx);
   return StringDataByIdx(type_id.descriptor_idx_);
 }
diff --git a/runtime/dex_file.cc b/runtime/dex_file.cc
index 85100ae..688f95b 100644
--- a/runtime/dex_file.cc
+++ b/runtime/dex_file.cc
@@ -590,6 +590,10 @@
 
 void DexFile::InitializeSectionsFromMapList() {
   const MapList* map_list = reinterpret_cast<const MapList*>(begin_ + header_->map_off_);
+  if (header_->map_off_ == 0 || header_->map_off_ > size_) {
+    // Bad offset. The dex file verifier runs after this method and will reject the file.
+    return;
+  }
   const size_t count = map_list->size_;
 
   size_t map_limit = header_->map_off_ + count * sizeof(MapItem);
diff --git a/runtime/dex_file_test.cc b/runtime/dex_file_test.cc
index 9131715..f811287 100644
--- a/runtime/dex_file_test.cc
+++ b/runtime/dex_file_test.cc
@@ -161,6 +161,16 @@
   "ACACAAALABgAAAAAAAAAAACgge4CAABjbGFzc2VzLmRleFVUBQADAWPlV3V4CwABBOQDAQAEiBMA"
   "AFBLBQYAAAAAAwADAPUAAABkBAAAAAA=";
 
+static const char kRawDexBadMapOffset[] =
+  "ZGV4CjAzNQAZKGSz85r+tXJ1I24FYi+FpQtWbXtelAmoAQAAcAAAAHhWNBIAAAAAAAAAAEAwIBAF"
+  "AAAAcAAAAAMAAACEAAAAAQAAAJAAAAAAAAAAAAAAAAIAAACcAAAAAQAAAKwAAADcAAAAzAAAAOQA"
+  "AADsAAAA9AAAAPkAAAANAQAAAgAAAAMAAAAEAAAABAAAAAIAAAAAAAAAAAAAAAAAAAABAAAAAAAA"
+  "AAAAAAABAAAAAQAAAAAAAAABAAAAAAAAABUBAAAAAAAAAQABAAEAAAAQAQAABAAAAHAQAQAAAA4A"
+  "Bjxpbml0PgAGQS5qYXZhAANMQTsAEkxqYXZhL2xhbmcvT2JqZWN0OwABVgABAAcOAAAAAQAAgYAE"
+  "zAEACwAAAAAAAAABAAAAAAAAAAEAAAAFAAAAcAAAAAIAAAADAAAAhAAAAAMAAAABAAAAkAAAAAUA"
+  "AAACAAAAnAAAAAYAAAABAAAArAAAAAEgAAABAAAAzAAAAAIgAAAFAAAA5AAAAAMgAAABAAAAEAEA"
+  "AAAgAAABAAAAFQEAAAAQAAABAAAAIAEAAA==";
+
 static void DecodeAndWriteDexFile(const char* base64, const char* location) {
   // decode base64
   CHECK(base64 != nullptr);
@@ -212,7 +222,8 @@
 
 static std::unique_ptr<const DexFile> OpenDexFileInMemoryBase64(const char* base64,
                                                                 const char* location,
-                                                                uint32_t location_checksum) {
+                                                                uint32_t location_checksum,
+                                                                bool expect_success) {
   CHECK(base64 != nullptr);
   std::vector<uint8_t> dex_bytes = DecodeBase64Vec(base64);
   CHECK_NE(dex_bytes.size(), 0u);
@@ -232,7 +243,11 @@
                                                         /* verify */ true,
                                                         /* verify_checksum */ true,
                                                         &error_message));
-  CHECK(dex_file != nullptr) << error_message;
+  if (expect_success) {
+    CHECK(dex_file != nullptr) << error_message;
+  } else {
+    CHECK(dex_file == nullptr) << "Expected dex file open to fail.";
+  }
   return dex_file;
 }
 
@@ -282,7 +297,7 @@
 TEST_F(DexFileTest, HeaderInMemory) {
   ScratchFile tmp;
   std::unique_ptr<const DexFile> raw =
-      OpenDexFileInMemoryBase64(kRawDex, tmp.GetFilename().c_str(), 0x00d87910U);
+      OpenDexFileInMemoryBase64(kRawDex, tmp.GetFilename().c_str(), 0x00d87910U, true);
   ValidateDexFileHeader(std::move(raw));
 }
 
@@ -569,4 +584,18 @@
   EXPECT_EQ(dex_files.size(), 3u);
 }
 
+TEST_F(DexFileTest, OpenDexBadMapOffset) {
+  ScratchFile tmp;
+  std::unique_ptr<const DexFile> raw =
+      OpenDexFileInMemoryBase64(kRawDexBadMapOffset, tmp.GetFilename().c_str(), 0xb3642819U, false);
+  EXPECT_EQ(raw, nullptr);
+}
+
+TEST_F(DexFileTest, GetStringWithNoIndex) {
+  ScratchFile tmp;
+  std::unique_ptr<const DexFile> raw(OpenDexFileBase64(kRawDex, tmp.GetFilename().c_str()));
+  dex::TypeIndex idx;
+  EXPECT_EQ(raw->StringByTypeIdx(idx), nullptr);
+}
+
 }  // namespace art
diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc
index 24ba52f..bcf5008 100644
--- a/runtime/gc/collector/concurrent_copying.cc
+++ b/runtime/gc/collector/concurrent_copying.cc
@@ -1108,7 +1108,9 @@
     space::RegionSpace* region_space = collector->RegionSpace();
     CHECK(!region_space->IsInFromSpace(obj)) << "Scanning object " << obj << " in from space";
     VerifyNoFromSpaceRefsFieldVisitor visitor(collector);
-    obj->VisitReferences(visitor, visitor);
+    obj->VisitReferences</*kVisitNativeRoots*/true, kDefaultVerifyFlags, kWithoutReadBarrier>(
+        visitor,
+        visitor);
     if (kUseBakerReadBarrier) {
       CHECK_EQ(obj->GetReadBarrierState(), ReadBarrier::WhiteState())
           << "obj=" << obj << " non-white rb_state " << obj->GetReadBarrierState();
@@ -1232,7 +1234,9 @@
     CHECK(!region_space->IsInFromSpace(obj)) << "Scanning object " << obj << " in from space";
     collector->AssertToSpaceInvariant(nullptr, MemberOffset(0), obj);
     AssertToSpaceInvariantFieldVisitor visitor(collector);
-    obj->VisitReferences(visitor, visitor);
+    obj->VisitReferences</*kVisitNativeRoots*/true, kDefaultVerifyFlags, kWithoutReadBarrier>(
+        visitor,
+        visitor);
   }
 
  private:
@@ -1471,8 +1475,7 @@
     // Add to the live bytes per unevacuated from space. Note this code is always run by the
     // GC-running thread (no synchronization required).
     DCHECK(region_space_bitmap_->Test(to_ref));
-    // Disable the read barrier in SizeOf for performance, which is safe.
-    size_t obj_size = to_ref->SizeOf<kDefaultVerifyFlags, kWithoutReadBarrier>();
+    size_t obj_size = to_ref->SizeOf<kDefaultVerifyFlags>();
     size_t alloc_size = RoundUp(obj_size, space::RegionSpace::kAlignment);
     region_space_->AddLiveBytes(to_ref, alloc_size);
   }
@@ -1714,16 +1717,19 @@
 }
 
 // Assert the to-space invariant.
-void ConcurrentCopying::AssertToSpaceInvariant(mirror::Object* obj, MemberOffset offset,
+void ConcurrentCopying::AssertToSpaceInvariant(mirror::Object* obj,
+                                               MemberOffset offset,
                                                mirror::Object* ref) {
-  CHECK(heap_->collector_type_ == kCollectorTypeCC) << static_cast<size_t>(heap_->collector_type_);
+  CHECK_EQ(heap_->collector_type_, kCollectorTypeCC);
   if (is_asserting_to_space_invariant_) {
-    if (region_space_->IsInToSpace(ref)) {
+    using RegionType = space::RegionSpace::RegionType;
+    space::RegionSpace::RegionType type = region_space_->GetRegionType(ref);
+    if (type == RegionType::kRegionTypeToSpace) {
       // OK.
       return;
-    } else if (region_space_->IsInUnevacFromSpace(ref)) {
+    } else if (type == RegionType::kRegionTypeUnevacFromSpace) {
       CHECK(IsMarkedInUnevacFromSpace(ref)) << ref;
-    } else if (region_space_->IsInFromSpace(ref)) {
+    } else if (UNLIKELY(type == RegionType::kRegionTypeFromSpace)) {
       // Not OK. Do extra logging.
       if (obj != nullptr) {
         LogFromSpaceRefHolder(obj, offset);
@@ -1888,10 +1894,10 @@
   explicit RefFieldsVisitor(ConcurrentCopying* collector)
       : collector_(collector) {}
 
-  void operator()(ObjPtr<mirror::Object> obj, MemberOffset offset, bool /* is_static */)
+  void operator()(mirror::Object* obj, MemberOffset offset, bool /* is_static */)
       const ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_)
       REQUIRES_SHARED(Locks::heap_bitmap_lock_) {
-    collector_->Process(obj.Ptr(), offset);
+    collector_->Process(obj, offset);
   }
 
   void operator()(ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> ref) const
@@ -2064,7 +2070,7 @@
     AssertToSpaceInvariant(nullptr, MemberOffset(0), java_lang_Object.Ptr());
     CHECK_EQ(byte_size, (java_lang_Object->GetObjectSize<kVerifyNone, kWithoutReadBarrier>()));
     dummy_obj->SetClass(java_lang_Object.Ptr());
-    CHECK_EQ(byte_size, (dummy_obj->SizeOf<kVerifyNone, kWithoutReadBarrier>()));
+    CHECK_EQ(byte_size, (dummy_obj->SizeOf<kVerifyNone>()));
   } else {
     // Use an int array.
     dummy_obj->SetClass(int_array_class);
@@ -2075,7 +2081,7 @@
     CHECK_EQ(dummy_arr->GetLength(), length)
         << "byte_size=" << byte_size << " length=" << length
         << " component_size=" << component_size << " data_offset=" << data_offset;
-    CHECK_EQ(byte_size, (dummy_obj->SizeOf<kVerifyNone, kWithoutReadBarrier>()))
+    CHECK_EQ(byte_size, (dummy_obj->SizeOf<kVerifyNone>()))
         << "byte_size=" << byte_size << " length=" << length
         << " component_size=" << component_size << " data_offset=" << data_offset;
   }
@@ -2142,10 +2148,10 @@
 
 mirror::Object* ConcurrentCopying::Copy(mirror::Object* from_ref) {
   DCHECK(region_space_->IsInFromSpace(from_ref));
-  // No read barrier to avoid nested RB that might violate the to-space
-  // invariant. Note that from_ref is a from space ref so the SizeOf()
-  // call will access the from-space meta objects, but it's ok and necessary.
-  size_t obj_size = from_ref->SizeOf<kDefaultVerifyFlags, kWithoutReadBarrier>();
+  // SizeOf() must not use a read barrier here, to avoid a nested RB that might violate the
+  // to-space invariant. Note that from_ref is a from-space ref, so the SizeOf() call will access
+  // the from-space meta objects, but that is OK and necessary.
+  size_t obj_size = from_ref->SizeOf<kDefaultVerifyFlags>();
   size_t region_space_alloc_size = RoundUp(obj_size, space::RegionSpace::kAlignment);
   size_t region_space_bytes_allocated = 0U;
   size_t non_moving_space_bytes_allocated = 0U;
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index 357541f..a853b98 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -2337,9 +2337,7 @@
     size_t bin_size = object_addr - context->prev_;
     // Add the bin consisting of the end of the previous object to the start of the current object.
     collector->AddBin(bin_size, context->prev_);
-    // Turn off read barrier. ZygoteCompactingCollector doesn't use it (even in the CC build.)
-    context->prev_ = object_addr + RoundUp(obj->SizeOf<kDefaultVerifyFlags, kWithoutReadBarrier>(),
-                                           kObjectAlignment);
+    context->prev_ = object_addr + RoundUp(obj->SizeOf<kDefaultVerifyFlags>(), kObjectAlignment);
   }
 
   void AddBin(size_t size, uintptr_t position) {
@@ -2359,8 +2357,7 @@
 
   virtual mirror::Object* MarkNonForwardedObject(mirror::Object* obj)
       REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
-    // Turn off read barrier. ZygoteCompactingCollector doesn't use it (even in the CC build.)
-    size_t obj_size = obj->SizeOf<kDefaultVerifyFlags, kWithoutReadBarrier>();
+    size_t obj_size = obj->SizeOf<kDefaultVerifyFlags>();
     size_t alloc_size = RoundUp(obj_size, kObjectAlignment);
     mirror::Object* forward_address;
     // Find the smallest bin which we can move obj in.
diff --git a/runtime/mirror/object-inl.h b/runtime/mirror/object-inl.h
index 811f1ea..f83645e 100644
--- a/runtime/mirror/object-inl.h
+++ b/runtime/mirror/object-inl.h
@@ -513,8 +513,11 @@
   return GetClass<kVerifyFlags>()->IsPhantomReferenceClass();
 }
 
-template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
+template<VerifyObjectFlags kVerifyFlags>
 inline size_t Object::SizeOf() {
+  // Read barrier is never required for SizeOf since object sizes are constant. Reading from-space
+  // values is therefore OK.
+  static constexpr ReadBarrierOption kReadBarrierOption = kWithoutReadBarrier;
   size_t result;
   constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
   if (IsArrayInstance<kVerifyFlags, kReadBarrierOption>()) {
diff --git a/runtime/mirror/object.h b/runtime/mirror/object.h
index f7ab26d..417a22d 100644
--- a/runtime/mirror/object.h
+++ b/runtime/mirror/object.h
@@ -128,8 +128,7 @@
   template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
   ALWAYS_INLINE bool InstanceOf(ObjPtr<Class> klass) REQUIRES_SHARED(Locks::mutator_lock_);
 
-  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
-           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
+  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
   size_t SizeOf() REQUIRES_SHARED(Locks::mutator_lock_);
 
   Object* Clone(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_)
diff --git a/runtime/openjdkjvmti/ti_heap.cc b/runtime/openjdkjvmti/ti_heap.cc
index 49d9aca..7fc5104 100644
--- a/runtime/openjdkjvmti/ti_heap.cc
+++ b/runtime/openjdkjvmti/ti_heap.cc
@@ -888,8 +888,8 @@
       bool add_to_worklist = ReportRoot(root_obj, info);
       // We use visited_ to mark roots already so we do not need another set.
       if (visited_->find(root_obj) == visited_->end()) {
-        visited_->insert(root_obj);
         if (add_to_worklist) {
+          visited_->insert(root_obj);
           worklist_->push_back(root_obj);
         }
       }
diff --git a/runtime/openjdkjvmti/ti_method.cc b/runtime/openjdkjvmti/ti_method.cc
index bc73029..01bf21d 100644
--- a/runtime/openjdkjvmti/ti_method.cc
+++ b/runtime/openjdkjvmti/ti_method.cc
@@ -61,8 +61,13 @@
 
   art::ScopedObjectAccess soa(art::Thread::Current());
   if (art_method->IsProxyMethod() || art_method->IsAbstract()) {
-    // This isn't specified as an error case, so return 0.
-    *size_ptr = 0;
+    // Use the shorty.
+    art::ArtMethod* base_method = art_method->GetInterfaceMethodIfProxy(art::kRuntimePointerSize);
+    size_t arg_count = art::ArtMethod::NumArgRegisters(base_method->GetShorty());
+    if (!base_method->IsStatic()) {
+      arg_count++;
+    }
+    *size_ptr = static_cast<jint>(arg_count);
     return ERR(NONE);
   }
 
@@ -203,9 +208,9 @@
 
   art::ScopedObjectAccess soa(art::Thread::Current());
   if (art_method->IsProxyMethod() || art_method->IsAbstract()) {
-    // This isn't specified as an error case, so return 0/0.
-    *start_location_ptr = 0;
-    *end_location_ptr = 0;
+    // This isn't specified as an error case, so return -1/-1 as the RI does.
+    *start_location_ptr = -1;
+    *end_location_ptr = -1;
     return ERR(NONE);
   }
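
For context on the proxy/abstract branch of GetArgumentsSize above: the argument size is now derived from the method's shorty, counting one vreg per argument and two for longs and doubles, plus one extra for the implicit 'this' on non-static methods. A rough standalone sketch of that counting, assuming the usual dex shorty convention (the first character is the return type); the helper name is hypothetical, and the real logic lives in ArtMethod::NumArgRegisters.

#include <cstddef>

// Count argument vregs described by a shorty such as "VJI" (void f(long, int)):
// skip the return type at index 0, then add 2 for 'J'/'D' and 1 for anything else.
size_t NumArgRegistersFromShorty(const char* shorty) {
  size_t regs = 0;
  for (const char* s = shorty + 1; *s != '\0'; ++s) {
    regs += (*s == 'J' || *s == 'D') ? 2u : 1u;
  }
  return regs;
}

This is what moves the 910-methods expectations below from "Argument size: 0" to 1 for Runnable.run() on the proxy (shorty "V", plus 'this') and to 2 for the abstract java.util.List method, which presumably takes one reference argument plus 'this'.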
 
diff --git a/runtime/read_barrier-inl.h b/runtime/read_barrier-inl.h
index 2b38b2e..c102fb0 100644
--- a/runtime/read_barrier-inl.h
+++ b/runtime/read_barrier-inl.h
@@ -28,12 +28,15 @@
 
 namespace art {
 
+// Disabled for performance reasons.
+static constexpr bool kCheckDebugDisallowReadBarrierCount = false;
+
 template <typename MirrorType, ReadBarrierOption kReadBarrierOption, bool kAlwaysUpdateField>
 inline MirrorType* ReadBarrier::Barrier(
     mirror::Object* obj, MemberOffset offset, mirror::HeapReference<MirrorType>* ref_addr) {
   constexpr bool with_read_barrier = kReadBarrierOption == kWithReadBarrier;
   if (kUseReadBarrier && with_read_barrier) {
-    if (kIsDebugBuild) {
+    if (kCheckDebugDisallowReadBarrierCount) {
       Thread* const self = Thread::Current();
       if (self != nullptr) {
         CHECK_EQ(self->GetDebugDisallowReadBarrierCount(), 0u);
diff --git a/test/910-methods/expected.txt b/test/910-methods/expected.txt
index 8e6b6e7..c14c6c4 100644
--- a/test/910-methods/expected.txt
+++ b/test/910-methods/expected.txt
@@ -32,19 +32,19 @@
 interface java.util.List
 1025
 Max locals: 0
-Argument size: 0
-Location start: 0
-Location end: 0
+Argument size: 2
+Location start: -1
+Location end: -1
 Is native: false
 Is obsolete: false
 Is synthetic: false
 [run, ()V, null]
-class $Proxy0
+class $Proxy20
 17
 Max locals: 0
-Argument size: 0
-Location start: 0
-Location end: 0
+Argument size: 1
+Location start: -1
+Location end: -1
 Is native: false
 Is obsolete: false
 Is synthetic: false
diff --git a/test/910-methods/src/art/Test910.java b/test/910-methods/src/art/Test910.java
index b3490e9..aa6d13a 100644
--- a/test/910-methods/src/art/Test910.java
+++ b/test/910-methods/src/art/Test910.java
@@ -39,17 +39,6 @@
     testMethod(findSyntheticMethod(), NestedSynthetic.class, false);
   }
 
-  private static Class<?> proxyClass = null;
-
-  private static Class<?> getProxyClass() throws Exception {
-    if (proxyClass != null) {
-      return proxyClass;
-    }
-
-    proxyClass = Proxy.getProxyClass(Main.class.getClassLoader(), new Class[] { Runnable.class });
-    return proxyClass;
-  }
-
   private static void testMethod(String className, String methodName, Class<?>... types)
       throws Exception {
     Class<?> base = Class.forName(className);
@@ -145,4 +134,62 @@
   private static native boolean isMethodNative(Method m);
   private static native boolean isMethodObsolete(Method m);
   private static native boolean isMethodSynthetic(Method m);
+
+  // We need this machinery for a consistent proxy name. Names of proxy classes include a
+  // unique number (derived by counting). This means that a simple call to getProxyClass
+  // depends on the test environment.
+  //
+  // To work around this, we assume that at most twenty proxies have been created before
+  // the test is run, and canonicalize on "$Proxy20". We add infrastructure to create
+  // as many proxy classes but cycling through subsets of the test-provided interfaces
+  // I0...I4.
+  //
+  //
+  // (This is done under the judgment that we do not want to have proxy-specific behavior
+  //  for testMethod.)
+
+  private static Class<?> proxyClass = null;
+
+  private static Class<?> getProxyClass() throws Exception {
+    if (proxyClass != null) {
+      return proxyClass;
+    }
+
+    for (int i = 1; i <= 21; i++) {
+      proxyClass = createProxyClass(i);
+      String name = proxyClass.getName();
+      if (name.equals("$Proxy20")) {
+        return proxyClass;
+      }
+    }
+    return proxyClass;
+  }
+
+  private static Class<?> createProxyClass(int i) throws Exception {
+    int count = Integer.bitCount(i);
+    Class<?>[] input = new Class<?>[count + 1];
+    input[0] = Runnable.class;
+    int inputIndex = 1;
+    int bitIndex = 0;
+    while (i != 0) {
+        if ((i & 1) != 0) {
+            input[inputIndex++] = Class.forName("art.Test910$I" + bitIndex);
+        }
+        i >>>= 1;
+        bitIndex++;
+    }
+    return Proxy.getProxyClass(Test910.class.getClassLoader(), input);
+  }
+
+  // Need this for the proxy naming.
+  public static interface I0 {
+  }
+  public static interface I1 {
+  }
+  public static interface I2 {
+  }
+  public static interface I3 {
+  }
+  public static interface I4 {
+  }
 }
diff --git a/test/913-heaps/expected.txt b/test/913-heaps/expected.txt
index 2a183ee..46e8171 100644
--- a/test/913-heaps/expected.txt
+++ b/test/913-heaps/expected.txt
@@ -1,9 +1,12 @@
 ---
 true true
+root@root --(jni-local[id=1,tag=3000,depth=0,method=followReferences])--> 3000@0 [size=136, length=-1]
 root@root --(stack-local[id=1,tag=3000,depth=2,method=doFollowReferencesTestNonRoot,vreg=13,location= 32])--> 1@1000 [size=16, length=-1]
-root@root --(stack-local[id=1,tag=3000,depth=3,method=doFollowReferencesTest,vreg=1,location= 28])--> 3000@0 [size=132, length=-1]
-root@root --(thread)--> 3000@0 [size=132, length=-1]
+root@root --(stack-local[id=1,tag=3000,depth=3,method=doFollowReferencesTest,vreg=1,location= 28])--> 3000@0 [size=136, length=-1]
+root@root --(stack-local[id=1,tag=3000,depth=5,method=run,vreg=2,location= 0])--> 3000@0 [size=136, length=-1]
+root@root --(thread)--> 3000@0 [size=136, length=-1]
 0@0 --(array-element@0)--> 1@1000 [size=16, length=-1]
+0@0 --(array-element@0)--> 3000@0 [size=136, length=-1]
 1001@0 --(superclass)--> 1000@0 [size=123, length=-1]
 1002@0 --(interface)--> 2001@0 [size=124, length=-1]
 1002@0 --(superclass)--> 1001@0 [size=123, length=-1]
@@ -40,11 +43,14 @@
 ---
 root@root --(jni-global)--> 1@1000 [size=16, length=-1]
 root@root --(jni-local[id=1,tag=3000,depth=0,method=followReferences])--> 1@1000 [size=16, length=-1]
+root@root --(jni-local[id=1,tag=3000,depth=0,method=followReferences])--> 3000@0 [size=136, length=-1]
 root@root --(stack-local[id=1,tag=3000,depth=1,method=doFollowReferencesTestImpl,vreg=13,location= 10])--> 1@1000 [size=16, length=-1]
 root@root --(stack-local[id=1,tag=3000,depth=1,method=doFollowReferencesTestImpl,vreg=5,location= 10])--> 1@1000 [size=16, length=-1]
 root@root --(stack-local[id=1,tag=3000,depth=2,method=doFollowReferencesTestRoot,vreg=4,location= 19])--> 1@1000 [size=16, length=-1]
+root@root --(stack-local[id=1,tag=3000,depth=5,method=run,vreg=2,location= 0])--> 3000@0 [size=136, length=-1]
 root@root --(thread)--> 1@1000 [size=16, length=-1]
-root@root --(thread)--> 3000@0 [size=132, length=-1]
+root@root --(thread)--> 3000@0 [size=136, length=-1]
+0@0 --(array-element@0)--> 3000@0 [size=136, length=-1]
 1001@0 --(superclass)--> 1000@0 [size=123, length=-1]
 1002@0 --(interface)--> 2001@0 [size=124, length=-1]
 1002@0 --(superclass)--> 1001@0 [size=123, length=-1]
@@ -79,18 +85,21 @@
 5@1002 --(field@9)--> 1@1000 [size=16, length=-1]
 6@1000 --(class)--> 1000@0 [size=123, length=-1]
 ---
-root@root --(thread)--> 3000@0 [size=132, length=-1]
+root@root --(thread)--> 3000@0 [size=136, length=-1]
 ---
 3@1001 --(class)--> 1001@0 [size=123, length=-1]
 ---
-root@root --(thread)--> 3000@0 [size=132, length=-1]
+root@root --(thread)--> 3000@0 [size=136, length=-1]
 ---
 3@1001 --(class)--> 1001@0 [size=123, length=-1]
 ---
+root@root --(jni-local[id=1,tag=3000,depth=0,method=followReferences])--> 3000@0 [size=136, length=-1]
 root@root --(stack-local[id=1,tag=3000,depth=2,method=doFollowReferencesTestNonRoot,vreg=13,location= 32])--> 1@1000 [size=16, length=-1]
-root@root --(stack-local[id=1,tag=3000,depth=3,method=doFollowReferencesTest,vreg=1,location= 28])--> 3000@0 [size=132, length=-1]
-root@root --(thread)--> 3000@0 [size=132, length=-1]
+root@root --(stack-local[id=1,tag=3000,depth=3,method=doFollowReferencesTest,vreg=1,location= 28])--> 3000@0 [size=136, length=-1]
+root@root --(stack-local[id=1,tag=3000,depth=5,method=run,vreg=2,location= 0])--> 3000@0 [size=136, length=-1]
+root@root --(thread)--> 3000@0 [size=136, length=-1]
 0@0 --(array-element@0)--> 1@1000 [size=16, length=-1]
+0@0 --(array-element@0)--> 3000@0 [size=136, length=-1]
 ---
 1001@0 --(superclass)--> 1000@0 [size=123, length=-1]
 3@1001 --(class)--> 1001@0 [size=123, length=-1]
@@ -99,11 +108,14 @@
 ---
 root@root --(jni-global)--> 1@1000 [size=16, length=-1]
 root@root --(jni-local[id=1,tag=3000,depth=0,method=followReferences])--> 1@1000 [size=16, length=-1]
+root@root --(jni-local[id=1,tag=3000,depth=0,method=followReferences])--> 3000@0 [size=136, length=-1]
 root@root --(stack-local[id=1,tag=3000,depth=1,method=doFollowReferencesTestImpl,vreg=13,location= 10])--> 1@1000 [size=16, length=-1]
 root@root --(stack-local[id=1,tag=3000,depth=1,method=doFollowReferencesTestImpl,vreg=5,location= 10])--> 1@1000 [size=16, length=-1]
 root@root --(stack-local[id=1,tag=3000,depth=2,method=doFollowReferencesTestRoot,vreg=4,location= 19])--> 1@1000 [size=16, length=-1]
+root@root --(stack-local[id=1,tag=3000,depth=5,method=run,vreg=2,location= 0])--> 3000@0 [size=136, length=-1]
 root@root --(thread)--> 1@1000 [size=16, length=-1]
-root@root --(thread)--> 3000@0 [size=132, length=-1]
+root@root --(thread)--> 3000@0 [size=136, length=-1]
+0@0 --(array-element@0)--> 3000@0 [size=136, length=-1]
 ---
 1001@0 --(superclass)--> 1000@0 [size=123, length=-1]
 3@1001 --(class)--> 1001@0 [size=123, length=-1]
@@ -182,10 +194,13 @@
 ---
 ---
 ---- untagged objects
+root@root --(jni-local[id=1,tag=3000,depth=0,method=followReferences])--> 3000@0 [size=136, length=-1]
 root@root --(stack-local[id=1,tag=3000,depth=2,method=doFollowReferencesTestNonRoot,vreg=13,location= 32])--> 1@1000 [size=16, length=-1]
-root@root --(stack-local[id=1,tag=3000,depth=3,method=doFollowReferencesTest,vreg=1,location= 28])--> 3000@0 [size=132, length=-1]
-root@root --(thread)--> 3000@0 [size=132, length=-1]
+root@root --(stack-local[id=1,tag=3000,depth=3,method=doFollowReferencesTest,vreg=1,location= 28])--> 3000@0 [size=136, length=-1]
+root@root --(stack-local[id=1,tag=3000,depth=5,method=run,vreg=2,location= 0])--> 3000@0 [size=136, length=-1]
+root@root --(thread)--> 3000@0 [size=136, length=-1]
 0@0 --(array-element@0)--> 1@1000 [size=16, length=-1]
+0@0 --(array-element@0)--> 3000@0 [size=136, length=-1]
 1001@0 --(superclass)--> 1000@0 [size=123, length=-1]
 1002@0 --(interface)--> 2001@0 [size=124, length=-1]
 1002@0 --(superclass)--> 1001@0 [size=123, length=-1]
@@ -222,11 +237,14 @@
 ---
 root@root --(jni-global)--> 1@1000 [size=16, length=-1]
 root@root --(jni-local[id=1,tag=3000,depth=0,method=followReferences])--> 1@1000 [size=16, length=-1]
+root@root --(jni-local[id=1,tag=3000,depth=0,method=followReferences])--> 3000@0 [size=136, length=-1]
 root@root --(stack-local[id=1,tag=3000,depth=1,method=doFollowReferencesTestImpl,vreg=13,location= 10])--> 1@1000 [size=16, length=-1]
 root@root --(stack-local[id=1,tag=3000,depth=1,method=doFollowReferencesTestImpl,vreg=5,location= 10])--> 1@1000 [size=16, length=-1]
 root@root --(stack-local[id=1,tag=3000,depth=2,method=doFollowReferencesTestRoot,vreg=4,location= 19])--> 1@1000 [size=16, length=-1]
+root@root --(stack-local[id=1,tag=3000,depth=5,method=run,vreg=2,location= 0])--> 3000@0 [size=136, length=-1]
 root@root --(thread)--> 1@1000 [size=16, length=-1]
-root@root --(thread)--> 3000@0 [size=132, length=-1]
+root@root --(thread)--> 3000@0 [size=136, length=-1]
+0@0 --(array-element@0)--> 3000@0 [size=136, length=-1]
 1001@0 --(superclass)--> 1000@0 [size=123, length=-1]
 1002@0 --(interface)--> 2001@0 [size=124, length=-1]
 1002@0 --(superclass)--> 1001@0 [size=123, length=-1]
@@ -262,8 +280,11 @@
 6@1000 --(class)--> 1000@0 [size=123, length=-1]
 ---
 ---- tagged classes
-root@root --(stack-local[id=1,tag=3000,depth=3,method=doFollowReferencesTest,vreg=1,location= 28])--> 3000@0 [size=132, length=-1]
-root@root --(thread)--> 3000@0 [size=132, length=-1]
+root@root --(jni-local[id=1,tag=3000,depth=0,method=followReferences])--> 3000@0 [size=136, length=-1]
+root@root --(stack-local[id=1,tag=3000,depth=3,method=doFollowReferencesTest,vreg=1,location= 28])--> 3000@0 [size=136, length=-1]
+root@root --(stack-local[id=1,tag=3000,depth=5,method=run,vreg=2,location= 0])--> 3000@0 [size=136, length=-1]
+root@root --(thread)--> 3000@0 [size=136, length=-1]
+0@0 --(array-element@0)--> 3000@0 [size=136, length=-1]
 1001@0 --(superclass)--> 1000@0 [size=123, length=-1]
 1002@0 --(interface)--> 2001@0 [size=124, length=-1]
 1002@0 --(superclass)--> 1001@0 [size=123, length=-1]
@@ -286,7 +307,10 @@
 5@1002 --(class)--> 1002@0 [size=123, length=-1]
 6@1000 --(class)--> 1000@0 [size=123, length=-1]
 ---
-root@root --(thread)--> 3000@0 [size=132, length=-1]
+root@root --(jni-local[id=1,tag=3000,depth=0,method=followReferences])--> 3000@0 [size=136, length=-1]
+root@root --(stack-local[id=1,tag=3000,depth=5,method=run,vreg=2,location= 0])--> 3000@0 [size=136, length=-1]
+root@root --(thread)--> 3000@0 [size=136, length=-1]
+0@0 --(array-element@0)--> 3000@0 [size=136, length=-1]
 1001@0 --(superclass)--> 1000@0 [size=123, length=-1]
 1002@0 --(interface)--> 2001@0 [size=124, length=-1]
 1002@0 --(superclass)--> 1001@0 [size=123, length=-1]
diff --git a/test/913-heaps/heaps.cc b/test/913-heaps/heaps.cc
index 6a06b29..a1b76d5 100644
--- a/test/913-heaps/heaps.cc
+++ b/test/913-heaps/heaps.cc
@@ -19,40 +19,35 @@
 #include <string.h>
 
 #include <iostream>
+#include <sstream>
 #include <vector>
 
 #include "android-base/macros.h"
 #include "android-base/logging.h"
 #include "android-base/stringprintf.h"
 
-#include "jit/jit.h"
 #include "jni.h"
-#include "native_stack_dump.h"
 #include "jvmti.h"
-#include "runtime.h"
-#include "scoped_thread_state_change-inl.h"
-#include "thread-inl.h"
-#include "thread_list.h"
 
 // Test infrastructure
 #include "jni_helper.h"
 #include "jvmti_helper.h"
 #include "test_env.h"
+#include "ti_utf.h"
 
 namespace art {
 namespace Test913Heaps {
 
 using android::base::StringPrintf;
 
+#define FINAL final
+#define OVERRIDE override
+#define UNREACHABLE  __builtin_unreachable
+
 extern "C" JNIEXPORT void JNICALL Java_art_Test913_forceGarbageCollection(
-    JNIEnv* env ATTRIBUTE_UNUSED, jclass klass ATTRIBUTE_UNUSED) {
+    JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) {
   jvmtiError ret = jvmti_env->ForceGarbageCollection();
-  if (ret != JVMTI_ERROR_NONE) {
-    char* err;
-    jvmti_env->GetErrorName(ret, &err);
-    printf("Error forcing a garbage collection: %s\n", err);
-    jvmti_env->Deallocate(reinterpret_cast<unsigned char*>(err));
-  }
+  JvmtiErrorToException(env, jvmti_env, ret);
 }
 
 class IterationConfig {
@@ -92,7 +87,8 @@
                         user_data);
 }
 
-static bool Run(jint heap_filter,
+static bool Run(JNIEnv* env,
+                jint heap_filter,
                 jclass klass_filter,
                 jobject initial_object,
                 IterationConfig* config) {
@@ -105,14 +101,7 @@
                                                initial_object,
                                                &callbacks,
                                                config);
-  if (ret != JVMTI_ERROR_NONE) {
-    char* err;
-    jvmti_env->GetErrorName(ret, &err);
-    printf("Failure running FollowReferences: %s\n", err);
-    jvmti_env->Deallocate(reinterpret_cast<unsigned char*>(err));
-    return false;
-  }
-  return true;
+  return !JvmtiErrorToException(env, jvmti_env, ret);
 }
 
 extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test913_followReferences(
@@ -142,6 +131,23 @@
                 jint length,
                 void* user_data ATTRIBUTE_UNUSED) OVERRIDE {
       jlong tag = *tag_ptr;
+
+      // Ignore any jni-global roots with untagged classes. These can be from the environment,
+      // or the JIT.
+      if (reference_kind == JVMTI_HEAP_REFERENCE_JNI_GLOBAL && class_tag == 0) {
+        return 0;
+      }
+      // Ignore classes (1000-1002@0) for thread objects. These can be held by the JIT.
+      if (reference_kind == JVMTI_HEAP_REFERENCE_THREAD && class_tag == 0 &&
+              (1000 <= *tag_ptr &&  *tag_ptr <= 1002)) {
+        return 0;
+      }
+      // Ignore stack-locals of untagged threads. That is the environment.
+      if (reference_kind == JVMTI_HEAP_REFERENCE_STACK_LOCAL &&
+          reference_info->stack_local.thread_tag != 3000) {
+        return 0;
+      }
+
       // Only check tagged objects.
       if (tag == 0) {
         return JVMTI_VISIT_OBJECTS;
@@ -201,10 +207,6 @@
                                   reference_info,
                                   adapted_size,
                                   length));
-
-      if (reference_kind == JVMTI_HEAP_REFERENCE_THREAD && *tag_ptr == 1000) {
-        DumpStacks();
-      }
     }
 
     std::vector<std::string> GetLines() const {
@@ -259,9 +261,15 @@
         if (info_.jni_local.method != nullptr) {
           jvmti_env->GetMethodName(info_.jni_local.method, &name, nullptr, nullptr);
         }
+        // Normalize the thread id, as this depends on the number of other threads
+        // and which thread is running the test. Should be:
+        //   jlong thread_id = info_.jni_local.thread_id;
+        // TODO: A pre-pass before the test should be able to fetch this number, so it can
+        //       be compared explicitly.
+        jlong thread_id = 1;
         std::string ret = StringPrintf("jni-local[id=%" PRId64 ",tag=%" PRId64 ",depth=%d,"
                                        "method=%s]",
-                                       info_.jni_local.thread_id,
+                                       thread_id,
                                        info_.jni_local.thread_tag,
                                        info_.jni_local.depth,
                                        name == nullptr ? "<null>" : name);
@@ -284,14 +292,8 @@
                         jlong size,
                         jint length,
                         const jvmtiHeapReferenceInfo* reference_info)
-          REQUIRES_SHARED(Locks::mutator_lock_)
           : Elem(referrer, referree, size, length) {
         memcpy(&info_, reference_info, sizeof(jvmtiHeapReferenceInfo));
-        // Debug stack trace for failure condition. Remove when done.
-        if (info_.stack_local.depth == 3 && info_.stack_local.slot == 13) {
-          DumpNativeStack(std::cerr, GetTid());
-          Thread::Current()->DumpJavaStack(std::cerr, false, false);
-        }
       }
 
      protected:
@@ -300,9 +302,15 @@
         if (info_.stack_local.method != nullptr) {
           jvmti_env->GetMethodName(info_.stack_local.method, &name, nullptr, nullptr);
         }
+        // Normalize the thread id, as this depends on the number of other threads
+        // and which thread is running the test. Should be:
+        //   jlong thread_id = info_.stack_local.thread_id;
+        // TODO: A pre-pass before the test should be able to fetch this number, so it can
+        //       be compared explicitly.
+        jlong thread_id = 1;
         std::string ret = StringPrintf("stack-local[id=%" PRId64 ",tag=%" PRId64 ",depth=%d,"
                                        "method=%s,vreg=%d,location=% " PRId64 "]",
-                                       info_.stack_local.thread_id,
+                                       thread_id,
                                        info_.stack_local.thread_tag,
                                        info_.stack_local.depth,
                                        name == nullptr ? "<null>" : name,
@@ -361,7 +369,13 @@
                                                         tmp));
         }
         case JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT: {
-          std::string tmp = StringPrintf("array-element@%d", reference_info->array.index);
+          jint index = reference_info->array.index;
+          // Normalize if it's "0@0" -> "3000@0".
+          // TODO: A pre-pass could probably give us this index to check explicitly.
+          if (referrer == "0@0" && referree == "3000@0") {
+            index = 0;
+          }
+          std::string tmp = StringPrintf("array-element@%d", index);
           return std::unique_ptr<Elem>(new StringElement(referrer,
                                                          referree,
                                                          size,
@@ -459,16 +473,6 @@
       UNREACHABLE();
     }
 
-    static void DumpStacks() NO_THREAD_SAFETY_ANALYSIS {
-      auto dump_function = [](art::Thread* t, void* data ATTRIBUTE_UNUSED) {
-        std::string name;
-        t->GetThreadName(name);
-        LOG(ERROR) << name;
-        art::DumpNativeStack(LOG_STREAM(ERROR), t->GetTid());
-      };
-      art::Runtime::Current()->GetThreadList()->ForEach(dump_function, nullptr);
-    }
-
     jint counter_;
     const jint stop_after_;
     const jint follow_set_;
@@ -476,8 +480,6 @@
     std::vector<std::unique_ptr<Elem>> lines_;
   };
 
-  jit::ScopedJitSuspend sjs;  // Wait to avoid JIT influence (e.g., JNI globals).
-
   // If jniRef isn't null, add a local and a global ref.
   ScopedLocalRef<jobject> jni_local_ref(env, nullptr);
   jobject jni_global_ref = nullptr;
@@ -487,7 +489,9 @@
   }
 
   PrintIterationConfig config(stop_after, follow_set);
-  Run(heap_filter, klass_filter, initial_object, &config);
+  if (!Run(env, heap_filter, klass_filter, initial_object, &config)) {
+    return nullptr;
+  }
 
   std::vector<std::string> lines = config.GetLines();
   jobjectArray ret = CreateObjectArray(env,
@@ -528,10 +532,10 @@
                                             void* user_data) {
       FindStringCallbacks* p = reinterpret_cast<FindStringCallbacks*>(user_data);
       if (*tag_ptr != 0) {
-        size_t utf_byte_count = CountUtf8Bytes(value, value_length);
+        size_t utf_byte_count = ti::CountUtf8Bytes(value, value_length);
         std::unique_ptr<char[]> mod_utf(new char[utf_byte_count + 1]);
         memset(mod_utf.get(), 0, utf_byte_count + 1);
-        ConvertUtf16ToModifiedUtf8(mod_utf.get(), utf_byte_count, value, value_length);
+        ti::ConvertUtf16ToModifiedUtf8(mod_utf.get(), utf_byte_count, value, value_length);
         p->data.push_back(android::base::StringPrintf("%" PRId64 "@%" PRId64 " (%" PRId64 ", '%s')",
                                                       *tag_ptr,
                                                       class_tag,
diff --git a/test/913-heaps/src/art/Test913.java b/test/913-heaps/src/art/Test913.java
index c54ecb0..2eb05ef 100644
--- a/test/913-heaps/src/art/Test913.java
+++ b/test/913-heaps/src/art/Test913.java
@@ -21,12 +21,34 @@
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.concurrent.CountDownLatch;
 
 public class Test913 {
   public static void run() throws Exception {
     Main.bindAgentJNIForClass(Test913.class);
 
     doTest();
+
+    // Use a countdown latch for synchronization, as join() will introduce more roots.
+    final CountDownLatch cdl1 = new CountDownLatch(1);
+
+    // Run the follow-references tests on a dedicated thread so we know the specific Thread type.
+    Thread t = new Thread() {
+      @Override
+      public void run() {
+        try {
+          Test913.runFollowReferences();
+        } catch (Exception e) {
+          throw new RuntimeException(e);
+        }
+        cdl1.countDown();
+      }
+    };
+    t.start();
+    cdl1.await();
+  }
+
+  public static void runFollowReferences() throws Exception {
     new TestConfig().doFollowReferencesTest();
 
     Runtime.getRuntime().gc();
@@ -481,7 +503,8 @@
           if (currentHead == null) {
             currentHead = referrer;
           } else {
-            if (!currentHead.equals(referrer)) {
+            // Ignore 0@0, as it can happen at any time (as it stands for all other objects).
+            if (!currentHead.equals(referrer) && !referrer.equals("0@0")) {
               completedReferrers.add(currentHead);
               currentHead = referrer;
               if (completedReferrers.contains(referrer)) {
diff --git a/test/Android.bp b/test/Android.bp
index 538fac4..033c4fc 100644
--- a/test/Android.bp
+++ b/test/Android.bp
@@ -260,6 +260,7 @@
         "908-gc-start-finish/gc_callbacks.cc",
         "910-methods/methods.cc",
         "911-get-stack-trace/stack_trace.cc",
+        "913-heaps/heaps.cc",
         "918-fields/fields.cc",
         "920-objects/objects.cc",
         "922-properties/properties.cc",
@@ -293,7 +294,6 @@
         "901-hello-ti-agent/basics.cc",
         "909-attach-agent/attach.cc",
         "912-classes/classes.cc",
-        "913-heaps/heaps.cc",
         "936-search-onload/search_onload.cc",
         "944-transform-classloaders/classloader.cc",
         "945-obsolete-native/obsolete_native.cc",
diff --git a/test/knownfailures.json b/test/knownfailures.json
index 54786bb..29c1e8b 100644
--- a/test/knownfailures.json
+++ b/test/knownfailures.json
@@ -601,20 +601,20 @@
     },
     {
         "tests": [
-            "008-exceptions",
             "031-class-attributes",
-            "034-call-null",
-            "038-inner-null",
-            "054-uncaught",
-            "122-npe",
-            "439-npe",
-            "911-get-stack-trace",
-            "946-obsolete-throw"
+            "911-get-stack-trace"
         ],
         "description": [
             "Tests that use annotations and debug data that is not kept around by dexter."
         ],
-        "bug": "b/37240685 & b/37239009",
+        "bug": "b/37239009",
         "variant": "jvmti-stress"
+    },
+    {
+        "tests": "160-read-barrier-stress",
+        "description": ["Disable 160-read-barrier-stress temporarily until we find ",
+                        "a fix to avoid the OOME with Jack. Note that it cannot be compiled ",
+                        "by javac either ('error: code too large')"],
+        "bug": "http://b/37335480"
     }
 ]