Merge "Expose pending hidden API warning through VMRuntime"
diff --git a/benchmark/type-check/info.txt b/benchmark/type-check/info.txt
deleted file mode 100644
index d14fb96..0000000
--- a/benchmark/type-check/info.txt
+++ /dev/null
@@ -1 +0,0 @@
-Benchmarks for repeating check-cast and instance-of instructions in a loop.
diff --git a/benchmark/type-check/src/TypeCheckBenchmark.java b/benchmark/type-check/src/TypeCheckBenchmark.java
deleted file mode 100644
index 96904d9..0000000
--- a/benchmark/type-check/src/TypeCheckBenchmark.java
+++ /dev/null
@@ -1,147 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-public class TypeCheckBenchmark {
-    public void timeCheckCastLevel1ToLevel1(int count) {
-        Object[] arr = arr1;
-        for (int i = 0; i < count; ++i) {
-            Level1 l1 = (Level1) arr[i & 1023];
-        }
-    }
-
-    public void timeCheckCastLevel2ToLevel1(int count) {
-        Object[] arr = arr2;
-        for (int i = 0; i < count; ++i) {
-            Level1 l1 = (Level1) arr[i & 1023];
-        }
-    }
-
-    public void timeCheckCastLevel3ToLevel1(int count) {
-        Object[] arr = arr3;
-        for (int i = 0; i < count; ++i) {
-            Level1 l1 = (Level1) arr[i & 1023];
-        }
-    }
-
-    public void timeCheckCastLevel9ToLevel1(int count) {
-        Object[] arr = arr9;
-        for (int i = 0; i < count; ++i) {
-            Level1 l1 = (Level1) arr[i & 1023];
-        }
-    }
-
-    public void timeCheckCastLevel9ToLevel2(int count) {
-        Object[] arr = arr9;
-        for (int i = 0; i < count; ++i) {
-            Level2 l2 = (Level2) arr[i & 1023];
-        }
-    }
-
-    public void timeInstanceOfLevel1ToLevel1(int count) {
-        int sum = 0;
-        Object[] arr = arr1;
-        for (int i = 0; i < count; ++i) {
-            if (arr[i & 1023] instanceof Level1) {
-              ++sum;
-            }
-        }
-        result = sum;
-    }
-
-    public void timeInstanceOfLevel2ToLevel1(int count) {
-        int sum = 0;
-        Object[] arr = arr2;
-        for (int i = 0; i < count; ++i) {
-            if (arr[i & 1023] instanceof Level1) {
-              ++sum;
-            }
-        }
-        result = sum;
-    }
-
-    public void timeInstanceOfLevel3ToLevel1(int count) {
-        int sum = 0;
-        Object[] arr = arr3;
-        for (int i = 0; i < count; ++i) {
-            if (arr[i & 1023] instanceof Level1) {
-              ++sum;
-            }
-        }
-        result = sum;
-    }
-
-    public void timeInstanceOfLevel9ToLevel1(int count) {
-        int sum = 0;
-        Object[] arr = arr9;
-        for (int i = 0; i < count; ++i) {
-            if (arr[i & 1023] instanceof Level1) {
-              ++sum;
-            }
-        }
-        result = sum;
-    }
-
-    public void timeInstanceOfLevel9ToLevel2(int count) {
-        int sum = 0;
-        Object[] arr = arr9;
-        for (int i = 0; i < count; ++i) {
-            if (arr[i & 1023] instanceof Level2) {
-              ++sum;
-            }
-        }
-        result = sum;
-    }
-
-    public static Object[] createArray(int level) {
-        try {
-            Class<?>[] ls = {
-                    null,
-                    Level1.class,
-                    Level2.class,
-                    Level3.class,
-                    Level4.class,
-                    Level5.class,
-                    Level6.class,
-                    Level7.class,
-                    Level8.class,
-                    Level9.class,
-            };
-            Class<?> l = ls[level];
-            Object[] array = new Object[1024];
-            for (int i = 0; i < array.length; ++i) {
-                array[i] = l.newInstance();
-            }
-            return array;
-        } catch (Exception unexpected) {
-            throw new Error("Initialization failure!");
-        }
-    }
-    Object[] arr1 = createArray(1);
-    Object[] arr2 = createArray(2);
-    Object[] arr3 = createArray(3);
-    Object[] arr9 = createArray(9);
-    int result;
-}
-
-class Level1 { }
-class Level2 extends Level1 { }
-class Level3 extends Level2 { }
-class Level4 extends Level3 { }
-class Level5 extends Level4 { }
-class Level6 extends Level5 { }
-class Level7 extends Level6 { }
-class Level8 extends Level7 { }
-class Level9 extends Level8 { }
diff --git a/compiler/driver/compiler_driver.cc b/compiler/driver/compiler_driver.cc
index 3720dda..60537fd 100644
--- a/compiler/driver/compiler_driver.cc
+++ b/compiler/driver/compiler_driver.cc
@@ -680,8 +680,7 @@
 // TODO: Collect the relevant string indices in parallel, then allocate them sequentially in a
 //       stable order.
 
-static void ResolveConstStrings(ClassLinker* class_linker,
-                                Handle<mirror::DexCache> dex_cache,
+static void ResolveConstStrings(Handle<mirror::DexCache> dex_cache,
                                 const DexFile& dex_file,
                                 const DexFile::CodeItem* code_item)
       REQUIRES_SHARED(Locks::mutator_lock_) {
@@ -690,6 +689,7 @@
     return;
   }
 
+  ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
   for (const DexInstructionPcPair& inst : CodeItemInstructionAccessor(dex_file, code_item)) {
     switch (inst->Opcode()) {
       case Instruction::CONST_STRING:
@@ -737,105 +737,22 @@
           dex_file->StringByTypeIdx(class_def.class_idx_));
       if (!compilation_enabled) {
         // Compilation is skipped, do not resolve const-string in code of this class.
-        // FIXME: Make sure that inlining honors this. b/26687569
+        // TODO: Make sure that inlining honors this.
         continue;
       }
 
       // Direct and virtual methods.
+      int64_t previous_method_idx = -1;
       while (it.HasNextMethod()) {
-        ResolveConstStrings(class_linker, dex_cache, *dex_file, it.GetMethodCodeItem());
-        it.Next();
-      }
-      DCHECK(!it.HasNext());
-    }
-  }
-}
-
-// Initialize type check bit strings for check-cast and instance-of in the code. Done to have
-// deterministic allocation behavior. Right now this is single-threaded for simplicity.
-// TODO: Collect the relevant type indices in parallel, then process them sequentially in a
-//       stable order.
-
-static void InitializeTypeCheckBitstrings(CompilerDriver* driver,
-                                          ClassLinker* class_linker,
-                                          Handle<mirror::DexCache> dex_cache,
-                                          const DexFile& dex_file,
-                                          const DexFile::CodeItem* code_item)
-      REQUIRES_SHARED(Locks::mutator_lock_) {
-  if (code_item == nullptr) {
-    // Abstract or native method.
-    return;
-  }
-
-  for (const DexInstructionPcPair& inst : CodeItemInstructionAccessor(dex_file, code_item)) {
-    switch (inst->Opcode()) {
-      case Instruction::CHECK_CAST:
-      case Instruction::INSTANCE_OF: {
-        dex::TypeIndex type_index(
-            (inst->Opcode() == Instruction::CHECK_CAST) ? inst->VRegB_21c() : inst->VRegC_22c());
-        const char* descriptor = dex_file.StringByTypeIdx(type_index);
-        // We currently do not use the bitstring type check for array or final (including
-        // primitive) classes. We may reconsider this in future if it's deemed to be beneficial.
-        // And we cannot use it for classes outside the boot image as we do not know the runtime
-        // value of their bitstring when compiling (it may not even get assigned at runtime).
-        if (descriptor[0] == 'L' && driver->IsImageClass(descriptor)) {
-          ObjPtr<mirror::Class> klass =
-              class_linker->LookupResolvedType(type_index,
-                                               dex_cache.Get(),
-                                               /* class_loader */ nullptr);
-          CHECK(klass != nullptr) << descriptor << " should have been previously resolved.";
-          // Now assign the bitstring if the class is not final. Keep this in sync with sharpening.
-          if (!klass->IsFinal()) {
-            MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
-            SubtypeCheck<ObjPtr<mirror::Class>>::EnsureAssigned(klass);
-          }
+        uint32_t method_idx = it.GetMemberIndex();
+        if (method_idx == previous_method_idx) {
+          // smali can create dex files with two encoded_methods sharing the same method_idx
+          // http://code.google.com/p/smali/issues/detail?id=119
+          it.Next();
+          continue;
         }
-        break;
-      }
-
-      default:
-        break;
-    }
-  }
-}
-
-static void InitializeTypeCheckBitstrings(CompilerDriver* driver,
-                                          const std::vector<const DexFile*>& dex_files,
-                                          TimingLogger* timings) {
-  ScopedObjectAccess soa(Thread::Current());
-  StackHandleScope<1> hs(soa.Self());
-  ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
-  MutableHandle<mirror::DexCache> dex_cache(hs.NewHandle<mirror::DexCache>(nullptr));
-
-  for (const DexFile* dex_file : dex_files) {
-    dex_cache.Assign(class_linker->FindDexCache(soa.Self(), *dex_file));
-    TimingLogger::ScopedTiming t("Initialize type check bitstrings", timings);
-
-    size_t class_def_count = dex_file->NumClassDefs();
-    for (size_t class_def_index = 0; class_def_index < class_def_count; ++class_def_index) {
-      const DexFile::ClassDef& class_def = dex_file->GetClassDef(class_def_index);
-
-      const uint8_t* class_data = dex_file->GetClassData(class_def);
-      if (class_data == nullptr) {
-        // empty class, probably a marker interface
-        continue;
-      }
-
-      ClassDataItemIterator it(*dex_file, class_data);
-      it.SkipAllFields();
-
-      bool compilation_enabled = driver->IsClassToCompile(
-          dex_file->StringByTypeIdx(class_def.class_idx_));
-      if (!compilation_enabled) {
-        // Compilation is skipped, do not look for type checks in code of this class.
-        // FIXME: Make sure that inlining honors this. b/26687569
-        continue;
-      }
-
-      // Direct and virtual methods.
-      while (it.HasNextMethod()) {
-        InitializeTypeCheckBitstrings(
-            driver, class_linker, dex_cache, *dex_file, it.GetMethodCodeItem());
+        previous_method_idx = method_idx;
+        ResolveConstStrings(dex_cache, *dex_file, it.GetMethodCodeItem());
         it.Next();
       }
       DCHECK(!it.HasNext());
@@ -937,13 +854,6 @@
 
   UpdateImageClasses(timings);
   VLOG(compiler) << "UpdateImageClasses: " << GetMemoryUsageString(false);
-
-  if (GetCompilerOptions().IsForceDeterminism() && GetCompilerOptions().IsBootImage()) {
-    // Initialize type check bit string used by check-cast and instanceof.
-    // Do this now to have a deterministic image.
-    // Note: This is done after UpdateImageClasses() at it relies on the image classes to be final.
-    InitializeTypeCheckBitstrings(this, dex_files, timings);
-  }
 }
 
 bool CompilerDriver::IsImageClass(const char* descriptor) const {
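
For context on the pass deleted above: InitializeTypeCheckBitstrings walked every check-cast / instance-of target and, for non-final boot-image classes, called SubtypeCheck::EnsureAssigned under subtype_check_lock_ so that each such class received a "path to root" bitstring in a deterministic order before the image was written. The sketch below is a toy model of that encoding, not ART's subtype_check.h implementation; the node layout, the fixed 4 bits per level, and the helper names are assumptions, and overflow handling (deep hierarchies, too many siblings) is omitted.

    // Toy model (C++) of the bitstring encoding this change removes.
    #include <cstdint>

    struct Node {
      Node* super = nullptr;
      uint32_t path_to_root = 0;  // assigned lazily, low bits first
      uint32_t mask = 0;          // which bits of path_to_root are significant
      uint32_t num_bits = 0;      // number of set bits in mask
      uint32_t next_child = 1;    // next sibling index to hand out (1-based)
      bool assigned = false;
    };

    constexpr uint32_t kBitsPerLevel = 4;  // illustrative; ART uses variable widths

    // Assign a bitstring to `k` (and its superclasses) exactly once: each class
    // gets its superclass's bits plus its own sibling index in the next slot,
    // so every subclass shares its ancestors' prefix.
    void EnsureAssigned(Node* k) {
      if (k->assigned) {
        return;
      }
      k->assigned = true;
      if (k->super == nullptr) {
        return;  // root: empty bitstring
      }
      EnsureAssigned(k->super);
      const uint32_t shift = k->super->num_bits;
      k->path_to_root = k->super->path_to_root | (k->super->next_child++ << shift);
      k->mask = k->super->mask | (((1u << kBitsPerLevel) - 1u) << shift);
      k->num_bits = shift + kBitsPerLevel;
    }

    // With both classes assigned, instance-of against `target` is one masked
    // compare; this is the predicate the code generators lowered to a couple of
    // instructions before this change.
    bool IsSubtypeOf(const Node& candidate, const Node& target) {
      return (candidate.path_to_root & target.mask) == target.path_to_root;
    }

The removed IsForceDeterminism()/IsBootImage() guard at the end of this file's hunks was what triggered the pass: the bitstrings had to be assigned after UpdateImageClasses() and before image writing to keep the boot image deterministic.
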
diff --git a/compiler/driver/compiler_driver.h b/compiler/driver/compiler_driver.h
index 18b1e0e..b51e0de 100644
--- a/compiler/driver/compiler_driver.h
+++ b/compiler/driver/compiler_driver.h
@@ -77,9 +77,6 @@
 class VerificationResults;
 class VerifiedMethod;
 
-// Compile-time flag to enable/disable bitstring type checks.
-static constexpr bool kUseBitstringTypeCheck = true;
-
 enum EntryPointCallingConvention {
   // ABI of invocations to a method's interpreter entry point.
   kInterpreterAbi,
diff --git a/compiler/optimizing/code_generator.h b/compiler/optimizing/code_generator.h
index 2dafbf7..3c5a37f 100644
--- a/compiler/optimizing/code_generator.h
+++ b/compiler/optimizing/code_generator.h
@@ -438,8 +438,6 @@
       case TypeCheckKind::kArrayCheck:
       case TypeCheckKind::kUnresolvedCheck:
         return false;
-      case TypeCheckKind::kBitstringCheck:
-        return true;
     }
     LOG(FATAL) << "Unreachable";
     UNREACHABLE();
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index b47a5cf..13bbffa 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -2112,26 +2112,6 @@
   __ Bind(slow_path->GetExitLabel());
 }
 
-void InstructionCodeGeneratorARM64::GenerateBitstringTypeCheckCompare(
-    HTypeCheckInstruction* check, vixl::aarch64::Register temp) {
-  uint32_t path_to_root = check->GetBitstringPathToRoot();
-  uint32_t mask = check->GetBitstringMask();
-  DCHECK(IsPowerOfTwo(mask + 1));
-  size_t mask_bits = WhichPowerOf2(mask + 1);
-
-  if (mask_bits == 16u) {
-    // Load only the bitstring part of the status word.
-    __ Ldrh(temp, HeapOperand(temp, mirror::Class::StatusOffset()));
-  } else {
-    // /* uint32_t */ temp = temp->status_
-    __ Ldr(temp, HeapOperand(temp, mirror::Class::StatusOffset()));
-    // Extract the bitstring bits.
-    __ Ubfx(temp, temp, 0, mask_bits);
-  }
-  // Compare the bitstring bits to `path_to_root`.
-  __ Cmp(temp, path_to_root);
-}
-
 void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
   BarrierType type = BarrierAll;
 
@@ -3860,8 +3840,6 @@
     case TypeCheckKind::kInterfaceCheck:
       call_kind = LocationSummary::kCallOnSlowPath;
       break;
-    case TypeCheckKind::kBitstringCheck:
-      break;
   }
 
   LocationSummary* locations =
@@ -3870,13 +3848,7 @@
     locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
   }
   locations->SetInAt(0, Location::RequiresRegister());
-  if (type_check_kind == TypeCheckKind::kBitstringCheck) {
-    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
-    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
-    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
-  } else {
-    locations->SetInAt(1, Location::RequiresRegister());
-  }
+  locations->SetInAt(1, Location::RequiresRegister());
   // The "out" register is used as a temporary, so it overlaps with the inputs.
   // Note that TypeCheckSlowPathARM64 uses this register too.
   locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
@@ -3889,9 +3861,7 @@
   LocationSummary* locations = instruction->GetLocations();
   Location obj_loc = locations->InAt(0);
   Register obj = InputRegisterAt(instruction, 0);
-  Register cls = (type_check_kind == TypeCheckKind::kBitstringCheck)
-      ? Register()
-      : InputRegisterAt(instruction, 1);
+  Register cls = InputRegisterAt(instruction, 1);
   Location out_loc = locations->Out();
   Register out = OutputRegister(instruction);
   const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
@@ -4077,23 +4047,6 @@
       }
       break;
     }
-
-    case TypeCheckKind::kBitstringCheck: {
-      // /* HeapReference<Class> */ temp = obj->klass_
-      GenerateReferenceLoadTwoRegisters(instruction,
-                                        out_loc,
-                                        obj_loc,
-                                        class_offset,
-                                        maybe_temp_loc,
-                                        kWithoutReadBarrier);
-
-      GenerateBitstringTypeCheckCompare(instruction, out);
-      __ Cset(out, eq);
-      if (zero.IsLinked()) {
-        __ B(&done);
-      }
-      break;
-    }
   }
 
   if (zero.IsLinked()) {
@@ -4116,13 +4069,7 @@
   LocationSummary* locations =
       new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
   locations->SetInAt(0, Location::RequiresRegister());
-  if (type_check_kind == TypeCheckKind::kBitstringCheck) {
-    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
-    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
-    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
-  } else {
-    locations->SetInAt(1, Location::RequiresRegister());
-  }
+  locations->SetInAt(1, Location::RequiresRegister());
   // Add temps for read barriers and other uses. One is used by TypeCheckSlowPathARM64.
   locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
 }
@@ -4132,9 +4079,7 @@
   LocationSummary* locations = instruction->GetLocations();
   Location obj_loc = locations->InAt(0);
   Register obj = InputRegisterAt(instruction, 0);
-  Register cls = (type_check_kind == TypeCheckKind::kBitstringCheck)
-      ? Register()
-      : InputRegisterAt(instruction, 1);
+  Register cls = InputRegisterAt(instruction, 1);
   const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
   DCHECK_GE(num_temps, 1u);
   DCHECK_LE(num_temps, 3u);
@@ -4315,20 +4260,6 @@
       __ B(ne, &start_loop);
       break;
     }
-
-    case TypeCheckKind::kBitstringCheck: {
-      // /* HeapReference<Class> */ temp = obj->klass_
-      GenerateReferenceLoadTwoRegisters(instruction,
-                                        temp_loc,
-                                        obj_loc,
-                                        class_offset,
-                                        maybe_temp2_loc,
-                                        kWithoutReadBarrier);
-
-      GenerateBitstringTypeCheckCompare(instruction, temp);
-      __ B(ne, type_check_slow_path->GetEntryLabel());
-      break;
-    }
   }
   __ Bind(&done);
 
diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h
index cc369de..f92c94f 100644
--- a/compiler/optimizing/code_generator_arm64.h
+++ b/compiler/optimizing/code_generator_arm64.h
@@ -264,8 +264,6 @@
  private:
   void GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
                                         vixl::aarch64::Register class_reg);
-  void GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check,
-                                         vixl::aarch64::Register temp);
   void GenerateSuspendCheck(HSuspendCheck* instruction, HBasicBlock* successor);
   void HandleBinaryOp(HBinaryOperation* instr);
 
diff --git a/compiler/optimizing/code_generator_arm_vixl.cc b/compiler/optimizing/code_generator_arm_vixl.cc
index 504c647..577fe00 100644
--- a/compiler/optimizing/code_generator_arm_vixl.cc
+++ b/compiler/optimizing/code_generator_arm_vixl.cc
@@ -2490,12 +2490,8 @@
   }
 
   if (!skip_overflow_check) {
-    // Using r4 instead of IP saves 2 bytes. Start by asserting that r4 is available here.
-    for (vixl32::Register reg : kParameterCoreRegistersVIXL) {
-      DCHECK(!reg.Is(r4));
-    }
-    DCHECK(!kCoreCalleeSaves.Includes(r4));
-    vixl32::Register temp = r4;
+    UseScratchRegisterScope temps(GetVIXLAssembler());
+    vixl32::Register temp = temps.Acquire();
     __ Sub(temp, sp, Operand::From(GetStackOverflowReservedBytes(InstructionSet::kArm)));
     // The load must immediately precede RecordPcInfo.
     ExactAssemblyScope aas(GetVIXLAssembler(),
@@ -7195,67 +7191,6 @@
   __ Bind(slow_path->GetExitLabel());
 }
 
-void InstructionCodeGeneratorARMVIXL::GenerateBitstringTypeCheckCompare(
-    HTypeCheckInstruction* check,
-    vixl32::Register temp,
-    vixl32::FlagsUpdate flags_update) {
-  uint32_t path_to_root = check->GetBitstringPathToRoot();
-  uint32_t mask = check->GetBitstringMask();
-  DCHECK(IsPowerOfTwo(mask + 1));
-  size_t mask_bits = WhichPowerOf2(mask + 1);
-
-  // Note that HInstanceOf shall check for zero value in `temp` but HCheckCast needs
-  // the Z flag for BNE. This is indicated by the `flags_update` parameter.
-  if (mask_bits == 16u) {
-    // Load only the bitstring part of the status word.
-    __ Ldrh(temp, MemOperand(temp, mirror::Class::StatusOffset().Int32Value()));
-    // Check if the bitstring bits are equal to `path_to_root`.
-    if (flags_update == SetFlags) {
-      __ Cmp(temp, path_to_root);
-    } else {
-      __ Sub(temp, temp, path_to_root);
-    }
-  } else {
-    // /* uint32_t */ temp = temp->status_
-    __ Ldr(temp, MemOperand(temp, mirror::Class::StatusOffset().Int32Value()));
-    if (GetAssembler()->ShifterOperandCanHold(SUB, path_to_root)) {
-      // Compare the bitstring bits using SUB.
-      __ Sub(temp, temp, path_to_root);
-      // Shift out bits that do not contribute to the comparison.
-      __ Lsl(flags_update, temp, temp, dchecked_integral_cast<uint32_t>(32u - mask_bits));
-    } else if (IsUint<16>(path_to_root)) {
-      if (temp.IsLow()) {
-        // Note: Optimized for size but contains one more dependent instruction than necessary.
-        //       MOVW+SUB(register) would be 8 bytes unless we find a low-reg temporary but the
-        //       macro assembler would use the high reg IP for the constant by default.
-        // Compare the bitstring bits using SUB.
-        __ Sub(temp, temp, path_to_root & 0x00ffu);  // 16-bit SUB (immediate) T2
-        __ Sub(temp, temp, path_to_root & 0xff00u);  // 32-bit SUB (immediate) T3
-        // Shift out bits that do not contribute to the comparison.
-        __ Lsl(flags_update, temp, temp, dchecked_integral_cast<uint32_t>(32u - mask_bits));
-      } else {
-        // Extract the bitstring bits.
-        __ Ubfx(temp, temp, 0, mask_bits);
-        // Check if the bitstring bits are equal to `path_to_root`.
-        if (flags_update == SetFlags) {
-          __ Cmp(temp, path_to_root);
-        } else {
-          __ Sub(temp, temp, path_to_root);
-        }
-      }
-    } else {
-      // Shift out bits that do not contribute to the comparison.
-      __ Lsl(temp, temp, dchecked_integral_cast<uint32_t>(32u - mask_bits));
-      // Check if the shifted bitstring bits are equal to `path_to_root << (32u - mask_bits)`.
-      if (flags_update == SetFlags) {
-        __ Cmp(temp, path_to_root << (32u - mask_bits));
-      } else {
-        __ Sub(temp, temp, path_to_root << (32u - mask_bits));
-      }
-    }
-  }
-}
-
 HLoadString::LoadKind CodeGeneratorARMVIXL::GetSupportedLoadStringKind(
     HLoadString::LoadKind desired_string_load_kind) {
   switch (desired_string_load_kind) {
@@ -7447,8 +7382,6 @@
     case TypeCheckKind::kInterfaceCheck:
       call_kind = LocationSummary::kCallOnSlowPath;
       break;
-    case TypeCheckKind::kBitstringCheck:
-      break;
   }
 
   LocationSummary* locations =
@@ -7457,13 +7390,7 @@
     locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
   }
   locations->SetInAt(0, Location::RequiresRegister());
-  if (type_check_kind == TypeCheckKind::kBitstringCheck) {
-    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
-    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
-    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
-  } else {
-    locations->SetInAt(1, Location::RequiresRegister());
-  }
+  locations->SetInAt(1, Location::RequiresRegister());
   // The "out" register is used as a temporary, so it overlaps with the inputs.
   // Note that TypeCheckSlowPathARM uses this register too.
   locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
@@ -7478,9 +7405,7 @@
   LocationSummary* locations = instruction->GetLocations();
   Location obj_loc = locations->InAt(0);
   vixl32::Register obj = InputRegisterAt(instruction, 0);
-  vixl32::Register cls = (type_check_kind == TypeCheckKind::kBitstringCheck)
-      ? vixl32::Register()
-      : InputRegisterAt(instruction, 1);
+  vixl32::Register cls = InputRegisterAt(instruction, 1);
   Location out_loc = locations->Out();
   vixl32::Register out = OutputRegister(instruction);
   const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
@@ -7720,26 +7645,6 @@
       __ B(slow_path->GetEntryLabel());
       break;
     }
-
-    case TypeCheckKind::kBitstringCheck: {
-      // /* HeapReference<Class> */ temp = obj->klass_
-      GenerateReferenceLoadTwoRegisters(instruction,
-                                        out_loc,
-                                        obj_loc,
-                                        class_offset,
-                                        maybe_temp_loc,
-                                        kWithoutReadBarrier);
-
-      GenerateBitstringTypeCheckCompare(instruction, out, DontCare);
-      // If `out` is a low reg and we would have another low reg temp, we could
-      // optimize this as RSBS+ADC, see GenerateConditionWithZero().
-      //
-      // Also, in some cases when `out` is a low reg and we're loading a constant to IP
-      // it would make sense to use CMP+MOV+IT+MOV instead of SUB+CLZ+LSR as the code size
-      // would be the same and we would have fewer direct data dependencies.
-      codegen_->GenerateConditionWithZero(kCondEQ, out, out);  // CLZ+LSR
-      break;
-    }
   }
 
   if (done.IsReferenced()) {
@@ -7757,13 +7662,7 @@
   LocationSummary* locations =
       new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
   locations->SetInAt(0, Location::RequiresRegister());
-  if (type_check_kind == TypeCheckKind::kBitstringCheck) {
-    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
-    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
-    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
-  } else {
-    locations->SetInAt(1, Location::RequiresRegister());
-  }
+  locations->SetInAt(1, Location::RequiresRegister());
   locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
 }
 
@@ -7772,9 +7671,7 @@
   LocationSummary* locations = instruction->GetLocations();
   Location obj_loc = locations->InAt(0);
   vixl32::Register obj = InputRegisterAt(instruction, 0);
-  vixl32::Register cls = (type_check_kind == TypeCheckKind::kBitstringCheck)
-      ? vixl32::Register()
-      : InputRegisterAt(instruction, 1);
+  vixl32::Register cls = InputRegisterAt(instruction, 1);
   Location temp_loc = locations->GetTemp(0);
   vixl32::Register temp = RegisterFrom(temp_loc);
   const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
@@ -7959,20 +7856,6 @@
       __ B(ne, &start_loop, /* far_target */ false);
       break;
     }
-
-    case TypeCheckKind::kBitstringCheck: {
-      // /* HeapReference<Class> */ temp = obj->klass_
-      GenerateReferenceLoadTwoRegisters(instruction,
-                                        temp_loc,
-                                        obj_loc,
-                                        class_offset,
-                                        maybe_temp2_loc,
-                                        kWithoutReadBarrier);
-
-      GenerateBitstringTypeCheckCompare(instruction, temp, SetFlags);
-      __ B(ne, type_check_slow_path->GetEntryLabel());
-      break;
-    }
   }
   if (done.IsReferenced()) {
     __ Bind(&done);
diff --git a/compiler/optimizing/code_generator_arm_vixl.h b/compiler/optimizing/code_generator_arm_vixl.h
index bd815f4..38570bb 100644
--- a/compiler/optimizing/code_generator_arm_vixl.h
+++ b/compiler/optimizing/code_generator_arm_vixl.h
@@ -322,9 +322,6 @@
   void GenerateSuspendCheck(HSuspendCheck* instruction, HBasicBlock* successor);
   void GenerateClassInitializationCheck(LoadClassSlowPathARMVIXL* slow_path,
                                         vixl32::Register class_reg);
-  void GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check,
-                                         vixl::aarch32::Register temp,
-                                         vixl::aarch32::FlagsUpdate flags_update);
   void GenerateAndConst(vixl::aarch32::Register out, vixl::aarch32::Register first, uint32_t value);
   void GenerateOrrConst(vixl::aarch32::Register out, vixl::aarch32::Register first, uint32_t value);
   void GenerateEorConst(vixl::aarch32::Register out, vixl::aarch32::Register first, uint32_t value);
diff --git a/compiler/optimizing/code_generator_mips.cc b/compiler/optimizing/code_generator_mips.cc
index 2ed0ab7..5c8e46e 100644
--- a/compiler/optimizing/code_generator_mips.cc
+++ b/compiler/optimizing/code_generator_mips.cc
@@ -1929,34 +1929,6 @@
   __ Bind(slow_path->GetExitLabel());
 }
 
-void InstructionCodeGeneratorMIPS::GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check,
-                                                                     Register temp) {
-  uint32_t path_to_root = check->GetBitstringPathToRoot();
-  uint32_t mask = check->GetBitstringMask();
-  DCHECK(IsPowerOfTwo(mask + 1));
-  size_t mask_bits = WhichPowerOf2(mask + 1);
-
-  if (mask_bits == 16u) {
-    // Load only the bitstring part of the status word.
-    __ LoadFromOffset(
-        kLoadUnsignedHalfword, temp, temp, mirror::Class::StatusOffset().Int32Value());
-    // Compare the bitstring bits using XOR.
-    __ Xori(temp, temp, dchecked_integral_cast<uint16_t>(path_to_root));
-  } else {
-    // /* uint32_t */ temp = temp->status_
-    __ LoadFromOffset(kLoadWord, temp, temp, mirror::Class::StatusOffset().Int32Value());
-    // Compare the bitstring bits using XOR.
-    if (IsUint<16>(path_to_root)) {
-      __ Xori(temp, temp, dchecked_integral_cast<uint16_t>(path_to_root));
-    } else {
-      __ LoadConst32(TMP, path_to_root);
-      __ Xor(temp, temp, TMP);
-    }
-    // Shift out bits that do not contribute to the comparison.
-    __ Sll(temp, temp, 32 - mask_bits);
-  }
-}
-
 void InstructionCodeGeneratorMIPS::GenerateMemoryBarrier(MemBarrierKind kind ATTRIBUTE_UNUSED) {
   __ Sync(0);  // Only stype 0 is supported.
 }
@@ -3317,20 +3289,12 @@
     case TypeCheckKind::kInterfaceCheck:
       call_kind = LocationSummary::kCallOnSlowPath;
       break;
-    case TypeCheckKind::kBitstringCheck:
-      break;
   }
 
   LocationSummary* locations =
       new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
   locations->SetInAt(0, Location::RequiresRegister());
-  if (type_check_kind == TypeCheckKind::kBitstringCheck) {
-    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
-    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
-    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
-  } else {
-    locations->SetInAt(1, Location::RequiresRegister());
-  }
+  locations->SetInAt(1, Location::RequiresRegister());
   locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
 }
 
@@ -3339,7 +3303,7 @@
   LocationSummary* locations = instruction->GetLocations();
   Location obj_loc = locations->InAt(0);
   Register obj = obj_loc.AsRegister<Register>();
-  Location cls = locations->InAt(1);
+  Register cls = locations->InAt(1).AsRegister<Register>();
   Location temp_loc = locations->GetTemp(0);
   Register temp = temp_loc.AsRegister<Register>();
   const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
@@ -3389,7 +3353,7 @@
                                         kWithoutReadBarrier);
       // Jump to slow path for throwing the exception or doing a
       // more involved array check.
-      __ Bne(temp, cls.AsRegister<Register>(), slow_path->GetEntryLabel());
+      __ Bne(temp, cls, slow_path->GetEntryLabel());
       break;
     }
 
@@ -3415,7 +3379,7 @@
       // exception.
       __ Beqz(temp, slow_path->GetEntryLabel());
       // Otherwise, compare the classes.
-      __ Bne(temp, cls.AsRegister<Register>(), &loop);
+      __ Bne(temp, cls, &loop);
       break;
     }
 
@@ -3430,7 +3394,7 @@
       // Walk over the class hierarchy to find a match.
       MipsLabel loop;
       __ Bind(&loop);
-      __ Beq(temp, cls.AsRegister<Register>(), &done);
+      __ Beq(temp, cls, &done);
       // /* HeapReference<Class> */ temp = temp->super_class_
       GenerateReferenceLoadOneRegister(instruction,
                                        temp_loc,
@@ -3453,7 +3417,7 @@
                                         maybe_temp2_loc,
                                         kWithoutReadBarrier);
       // Do an exact check.
-      __ Beq(temp, cls.AsRegister<Register>(), &done);
+      __ Beq(temp, cls, &done);
       // Otherwise, we need to check that the object's class is a non-primitive array.
       // /* HeapReference<Class> */ temp = temp->component_type_
       GenerateReferenceLoadOneRegister(instruction,
@@ -3512,21 +3476,7 @@
       // Go to next interface.
       __ Addiu(TMP, TMP, -2);
       // Compare the classes and continue the loop if they do not match.
-      __ Bne(AT, cls.AsRegister<Register>(), &loop);
-      break;
-    }
-
-    case TypeCheckKind::kBitstringCheck: {
-      // /* HeapReference<Class> */ temp = obj->klass_
-      GenerateReferenceLoadTwoRegisters(instruction,
-                                        temp_loc,
-                                        obj_loc,
-                                        class_offset,
-                                        maybe_temp2_loc,
-                                        kWithoutReadBarrier);
-
-      GenerateBitstringTypeCheckCompare(instruction, temp);
-      __ Bnez(temp, slow_path->GetEntryLabel());
+      __ Bne(AT, cls, &loop);
       break;
     }
   }
@@ -7257,8 +7207,6 @@
     case TypeCheckKind::kInterfaceCheck:
       call_kind = LocationSummary::kCallOnSlowPath;
       break;
-    case TypeCheckKind::kBitstringCheck:
-      break;
   }
 
   LocationSummary* locations =
@@ -7267,13 +7215,7 @@
     locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
   }
   locations->SetInAt(0, Location::RequiresRegister());
-  if (type_check_kind == TypeCheckKind::kBitstringCheck) {
-    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
-    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
-    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
-  } else {
-    locations->SetInAt(1, Location::RequiresRegister());
-  }
+  locations->SetInAt(1, Location::RequiresRegister());
   // The output does overlap inputs.
   // Note that TypeCheckSlowPathMIPS uses this register too.
   locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
@@ -7285,7 +7227,7 @@
   LocationSummary* locations = instruction->GetLocations();
   Location obj_loc = locations->InAt(0);
   Register obj = obj_loc.AsRegister<Register>();
-  Location cls = locations->InAt(1);
+  Register cls = locations->InAt(1).AsRegister<Register>();
   Location out_loc = locations->Out();
   Register out = out_loc.AsRegister<Register>();
   const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
@@ -7315,7 +7257,7 @@
                                         maybe_temp_loc,
                                         kCompilerReadBarrierOption);
       // Classes must be equal for the instanceof to succeed.
-      __ Xor(out, out, cls.AsRegister<Register>());
+      __ Xor(out, out, cls);
       __ Sltiu(out, out, 1);
       break;
     }
@@ -7340,7 +7282,7 @@
                                        kCompilerReadBarrierOption);
       // If `out` is null, we use it for the result, and jump to `done`.
       __ Beqz(out, &done);
-      __ Bne(out, cls.AsRegister<Register>(), &loop);
+      __ Bne(out, cls, &loop);
       __ LoadConst32(out, 1);
       break;
     }
@@ -7356,7 +7298,7 @@
       // Walk over the class hierarchy to find a match.
       MipsLabel loop, success;
       __ Bind(&loop);
-      __ Beq(out, cls.AsRegister<Register>(), &success);
+      __ Beq(out, cls, &success);
       // /* HeapReference<Class> */ out = out->super_class_
       GenerateReferenceLoadOneRegister(instruction,
                                        out_loc,
@@ -7381,7 +7323,7 @@
                                         kCompilerReadBarrierOption);
       // Do an exact check.
       MipsLabel success;
-      __ Beq(out, cls.AsRegister<Register>(), &success);
+      __ Beq(out, cls, &success);
       // Otherwise, we need to check that the object's class is a non-primitive array.
       // /* HeapReference<Class> */ out = out->component_type_
       GenerateReferenceLoadOneRegister(instruction,
@@ -7413,7 +7355,7 @@
       slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathMIPS(
           instruction, /* is_fatal */ false);
       codegen_->AddSlowPath(slow_path);
-      __ Bne(out, cls.AsRegister<Register>(), slow_path->GetEntryLabel());
+      __ Bne(out, cls, slow_path->GetEntryLabel());
       __ LoadConst32(out, 1);
       break;
     }
@@ -7445,20 +7387,6 @@
       __ B(slow_path->GetEntryLabel());
       break;
     }
-
-    case TypeCheckKind::kBitstringCheck: {
-      // /* HeapReference<Class> */ temp = obj->klass_
-      GenerateReferenceLoadTwoRegisters(instruction,
-                                        out_loc,
-                                        obj_loc,
-                                        class_offset,
-                                        maybe_temp_loc,
-                                        kWithoutReadBarrier);
-
-      GenerateBitstringTypeCheckCompare(instruction, out);
-      __ Sltiu(out, out, 1);
-      break;
-    }
   }
 
   __ Bind(&done);
diff --git a/compiler/optimizing/code_generator_mips.h b/compiler/optimizing/code_generator_mips.h
index ffeb3b0..32b3e42 100644
--- a/compiler/optimizing/code_generator_mips.h
+++ b/compiler/optimizing/code_generator_mips.h
@@ -237,7 +237,6 @@
  private:
   void GenerateClassInitializationCheck(SlowPathCodeMIPS* slow_path, Register class_reg);
   void GenerateSuspendCheck(HSuspendCheck* check, HBasicBlock* successor);
-  void GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check, Register temp);
   void HandleBinaryOp(HBinaryOperation* operation);
   void HandleCondition(HCondition* instruction);
   void HandleShift(HBinaryOperation* operation);
diff --git a/compiler/optimizing/code_generator_mips64.cc b/compiler/optimizing/code_generator_mips64.cc
index 3ae8a30..bcfe051 100644
--- a/compiler/optimizing/code_generator_mips64.cc
+++ b/compiler/optimizing/code_generator_mips64.cc
@@ -1775,34 +1775,6 @@
   __ Bind(slow_path->GetExitLabel());
 }
 
-void InstructionCodeGeneratorMIPS64::GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check,
-                                                                       GpuRegister temp) {
-  uint32_t path_to_root = check->GetBitstringPathToRoot();
-  uint32_t mask = check->GetBitstringMask();
-  DCHECK(IsPowerOfTwo(mask + 1));
-  size_t mask_bits = WhichPowerOf2(mask + 1);
-
-  if (mask_bits == 16u) {
-    // Load only the bitstring part of the status word.
-    __ LoadFromOffset(
-        kLoadUnsignedHalfword, temp, temp, mirror::Class::StatusOffset().Int32Value());
-    // Compare the bitstring bits using XOR.
-    __ Xori(temp, temp, dchecked_integral_cast<uint16_t>(path_to_root));
-  } else {
-    // /* uint32_t */ temp = temp->status_
-    __ LoadFromOffset(kLoadWord, temp, temp, mirror::Class::StatusOffset().Int32Value());
-    // Compare the bitstring bits using XOR.
-    if (IsUint<16>(path_to_root)) {
-      __ Xori(temp, temp, dchecked_integral_cast<uint16_t>(path_to_root));
-    } else {
-      __ LoadConst32(TMP, path_to_root);
-      __ Xor(temp, temp, TMP);
-    }
-    // Shift out bits that do not contribute to the comparison.
-    __ Sll(temp, temp, 32 - mask_bits);
-  }
-}
-
 void InstructionCodeGeneratorMIPS64::GenerateMemoryBarrier(MemBarrierKind kind ATTRIBUTE_UNUSED) {
   __ Sync(0);  // only stype 0 is supported
 }
@@ -2872,20 +2844,12 @@
     case TypeCheckKind::kInterfaceCheck:
       call_kind = LocationSummary::kCallOnSlowPath;
       break;
-    case TypeCheckKind::kBitstringCheck:
-      break;
   }
 
   LocationSummary* locations =
       new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
   locations->SetInAt(0, Location::RequiresRegister());
-  if (type_check_kind == TypeCheckKind::kBitstringCheck) {
-    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
-    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
-    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
-  } else {
-    locations->SetInAt(1, Location::RequiresRegister());
-  }
+  locations->SetInAt(1, Location::RequiresRegister());
   locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
 }
 
@@ -2894,7 +2858,7 @@
   LocationSummary* locations = instruction->GetLocations();
   Location obj_loc = locations->InAt(0);
   GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
-  Location cls = locations->InAt(1);
+  GpuRegister cls = locations->InAt(1).AsRegister<GpuRegister>();
   Location temp_loc = locations->GetTemp(0);
   GpuRegister temp = temp_loc.AsRegister<GpuRegister>();
   const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
@@ -2944,7 +2908,7 @@
                                         kWithoutReadBarrier);
       // Jump to slow path for throwing the exception or doing a
       // more involved array check.
-      __ Bnec(temp, cls.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
+      __ Bnec(temp, cls, slow_path->GetEntryLabel());
       break;
     }
 
@@ -2970,7 +2934,7 @@
       // exception.
       __ Beqzc(temp, slow_path->GetEntryLabel());
       // Otherwise, compare the classes.
-      __ Bnec(temp, cls.AsRegister<GpuRegister>(), &loop);
+      __ Bnec(temp, cls, &loop);
       break;
     }
 
@@ -2985,7 +2949,7 @@
       // Walk over the class hierarchy to find a match.
       Mips64Label loop;
       __ Bind(&loop);
-      __ Beqc(temp, cls.AsRegister<GpuRegister>(), &done);
+      __ Beqc(temp, cls, &done);
       // /* HeapReference<Class> */ temp = temp->super_class_
       GenerateReferenceLoadOneRegister(instruction,
                                        temp_loc,
@@ -3008,7 +2972,7 @@
                                         maybe_temp2_loc,
                                         kWithoutReadBarrier);
       // Do an exact check.
-      __ Beqc(temp, cls.AsRegister<GpuRegister>(), &done);
+      __ Beqc(temp, cls, &done);
       // Otherwise, we need to check that the object's class is a non-primitive array.
       // /* HeapReference<Class> */ temp = temp->component_type_
       GenerateReferenceLoadOneRegister(instruction,
@@ -3067,21 +3031,7 @@
       __ Daddiu(temp, temp, 2 * kHeapReferenceSize);
       __ Addiu(TMP, TMP, -2);
       // Compare the classes and continue the loop if they do not match.
-      __ Bnec(AT, cls.AsRegister<GpuRegister>(), &loop);
-      break;
-    }
-
-    case TypeCheckKind::kBitstringCheck: {
-      // /* HeapReference<Class> */ temp = obj->klass_
-      GenerateReferenceLoadTwoRegisters(instruction,
-                                        temp_loc,
-                                        obj_loc,
-                                        class_offset,
-                                        maybe_temp2_loc,
-                                        kWithoutReadBarrier);
-
-      GenerateBitstringTypeCheckCompare(instruction, temp);
-      __ Bnezc(temp, slow_path->GetEntryLabel());
+      __ Bnec(AT, cls, &loop);
       break;
     }
   }
@@ -5574,8 +5524,6 @@
     case TypeCheckKind::kInterfaceCheck:
       call_kind = LocationSummary::kCallOnSlowPath;
       break;
-    case TypeCheckKind::kBitstringCheck:
-      break;
   }
 
   LocationSummary* locations =
@@ -5584,13 +5532,7 @@
     locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
   }
   locations->SetInAt(0, Location::RequiresRegister());
-  if (type_check_kind == TypeCheckKind::kBitstringCheck) {
-    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
-    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
-    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
-  } else {
-    locations->SetInAt(1, Location::RequiresRegister());
-  }
+  locations->SetInAt(1, Location::RequiresRegister());
   // The output does overlap inputs.
   // Note that TypeCheckSlowPathMIPS64 uses this register too.
   locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
@@ -5602,7 +5544,7 @@
   LocationSummary* locations = instruction->GetLocations();
   Location obj_loc = locations->InAt(0);
   GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
-  Location cls = locations->InAt(1);
+  GpuRegister cls = locations->InAt(1).AsRegister<GpuRegister>();
   Location out_loc = locations->Out();
   GpuRegister out = out_loc.AsRegister<GpuRegister>();
   const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
@@ -5632,7 +5574,7 @@
                                         maybe_temp_loc,
                                         kCompilerReadBarrierOption);
       // Classes must be equal for the instanceof to succeed.
-      __ Xor(out, out, cls.AsRegister<GpuRegister>());
+      __ Xor(out, out, cls);
       __ Sltiu(out, out, 1);
       break;
     }
@@ -5657,7 +5599,7 @@
                                        kCompilerReadBarrierOption);
       // If `out` is null, we use it for the result, and jump to `done`.
       __ Beqzc(out, &done);
-      __ Bnec(out, cls.AsRegister<GpuRegister>(), &loop);
+      __ Bnec(out, cls, &loop);
       __ LoadConst32(out, 1);
       break;
     }
@@ -5673,7 +5615,7 @@
       // Walk over the class hierarchy to find a match.
       Mips64Label loop, success;
       __ Bind(&loop);
-      __ Beqc(out, cls.AsRegister<GpuRegister>(), &success);
+      __ Beqc(out, cls, &success);
       // /* HeapReference<Class> */ out = out->super_class_
       GenerateReferenceLoadOneRegister(instruction,
                                        out_loc,
@@ -5698,7 +5640,7 @@
                                         kCompilerReadBarrierOption);
       // Do an exact check.
       Mips64Label success;
-      __ Beqc(out, cls.AsRegister<GpuRegister>(), &success);
+      __ Beqc(out, cls, &success);
       // Otherwise, we need to check that the object's class is a non-primitive array.
       // /* HeapReference<Class> */ out = out->component_type_
       GenerateReferenceLoadOneRegister(instruction,
@@ -5730,7 +5672,7 @@
       slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathMIPS64(
           instruction, /* is_fatal */ false);
       codegen_->AddSlowPath(slow_path);
-      __ Bnec(out, cls.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
+      __ Bnec(out, cls, slow_path->GetEntryLabel());
       __ LoadConst32(out, 1);
       break;
     }
@@ -5762,20 +5704,6 @@
       __ Bc(slow_path->GetEntryLabel());
       break;
     }
-
-    case TypeCheckKind::kBitstringCheck: {
-      // /* HeapReference<Class> */ temp = obj->klass_
-      GenerateReferenceLoadTwoRegisters(instruction,
-                                        out_loc,
-                                        obj_loc,
-                                        class_offset,
-                                        maybe_temp_loc,
-                                        kWithoutReadBarrier);
-
-      GenerateBitstringTypeCheckCompare(instruction, out);
-      __ Sltiu(out, out, 1);
-      break;
-    }
   }
 
   __ Bind(&done);
diff --git a/compiler/optimizing/code_generator_mips64.h b/compiler/optimizing/code_generator_mips64.h
index 87d5a9c..d479410 100644
--- a/compiler/optimizing/code_generator_mips64.h
+++ b/compiler/optimizing/code_generator_mips64.h
@@ -233,7 +233,6 @@
 
  private:
   void GenerateClassInitializationCheck(SlowPathCodeMIPS64* slow_path, GpuRegister class_reg);
-  void GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check, GpuRegister temp);
   void GenerateSuspendCheck(HSuspendCheck* check, HBasicBlock* successor);
   void HandleBinaryOp(HBinaryOperation* operation);
   void HandleCondition(HCondition* instruction);
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index e85f900..cbe9e0a 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -6234,27 +6234,6 @@
   // No need for memory fence, thanks to the X86 memory model.
 }
 
-void InstructionCodeGeneratorX86::GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check,
-                                                                    Register temp) {
-  uint32_t path_to_root = check->GetBitstringPathToRoot();
-  uint32_t mask = check->GetBitstringMask();
-  DCHECK(IsPowerOfTwo(mask + 1));
-  size_t mask_bits = WhichPowerOf2(mask + 1);
-
-  if ((false) && mask_bits == 16u) {
-    // FIXME: cmpw() erroneously emits the constant as 32 bits instead of 16 bits. b/71853552
-    // Compare the bitstring in memory.
-    __ cmpw(Address(temp, mirror::Class::StatusOffset()), Immediate(path_to_root));
-  } else {
-    // /* uint32_t */ temp = temp->status_
-    __ movl(temp, Address(temp, mirror::Class::StatusOffset()));
-    // Compare the bitstring bits using SUB.
-    __ subl(temp, Immediate(path_to_root));
-    // Shift out bits that do not contribute to the comparison.
-    __ shll(temp, Immediate(32u - mask_bits));
-  }
-}
-
 HLoadString::LoadKind CodeGeneratorX86::GetSupportedLoadStringKind(
     HLoadString::LoadKind desired_string_load_kind) {
   switch (desired_string_load_kind) {
@@ -6447,8 +6426,6 @@
     case TypeCheckKind::kInterfaceCheck:
       call_kind = LocationSummary::kCallOnSlowPath;
       break;
-    case TypeCheckKind::kBitstringCheck:
-      break;
   }
 
   LocationSummary* locations =
@@ -6457,13 +6434,7 @@
     locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
   }
   locations->SetInAt(0, Location::RequiresRegister());
-  if (type_check_kind == TypeCheckKind::kBitstringCheck) {
-    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
-    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
-    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
-  } else {
-    locations->SetInAt(1, Location::Any());
-  }
+  locations->SetInAt(1, Location::Any());
   // Note that TypeCheckSlowPathX86 uses this "out" register too.
   locations->SetOut(Location::RequiresRegister());
   // When read barriers are enabled, we need a temporary register for some cases.
@@ -6684,21 +6655,6 @@
       }
       break;
     }
-
-    case TypeCheckKind::kBitstringCheck: {
-      // /* HeapReference<Class> */ temp = obj->klass_
-      GenerateReferenceLoadTwoRegisters(instruction,
-                                        out_loc,
-                                        obj_loc,
-                                        class_offset,
-                                        kWithoutReadBarrier);
-
-      GenerateBitstringTypeCheckCompare(instruction, out);
-      __ j(kNotEqual, &zero);
-      __ movl(out, Immediate(1));
-      __ jmp(&done);
-      break;
-    }
   }
 
   if (zero.IsLinked()) {
@@ -6725,10 +6681,6 @@
     // Require a register for the interface check since there is a loop that compares the class to
     // a memory address.
     locations->SetInAt(1, Location::RequiresRegister());
-  } else if (type_check_kind == TypeCheckKind::kBitstringCheck) {
-    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
-    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
-    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
   } else {
     locations->SetInAt(1, Location::Any());
   }
@@ -6948,19 +6900,6 @@
       __ MaybeUnpoisonHeapReference(cls.AsRegister<Register>());
       break;
     }
-
-    case TypeCheckKind::kBitstringCheck: {
-      // /* HeapReference<Class> */ temp = obj->klass_
-      GenerateReferenceLoadTwoRegisters(instruction,
-                                        temp_loc,
-                                        obj_loc,
-                                        class_offset,
-                                        kWithoutReadBarrier);
-
-      GenerateBitstringTypeCheckCompare(instruction, temp);
-      __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
-      break;
-    }
   }
   __ Bind(&done);
 
diff --git a/compiler/optimizing/code_generator_x86.h b/compiler/optimizing/code_generator_x86.h
index 2d14d4c..0082853 100644
--- a/compiler/optimizing/code_generator_x86.h
+++ b/compiler/optimizing/code_generator_x86.h
@@ -211,7 +211,6 @@
   // the suspend call.
   void GenerateSuspendCheck(HSuspendCheck* check, HBasicBlock* successor);
   void GenerateClassInitializationCheck(SlowPathCode* slow_path, Register class_reg);
-  void GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check, Register temp);
   void HandleBitwiseOperation(HBinaryOperation* instruction);
   void GenerateDivRemIntegral(HBinaryOperation* instruction);
   void DivRemOneOrMinusOne(HBinaryOperation* instruction);
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index 9f8b1bb..510eec4 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -5440,27 +5440,6 @@
   // No need for memory fence, thanks to the x86-64 memory model.
 }
 
-void InstructionCodeGeneratorX86_64::GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check,
-                                                                       CpuRegister temp) {
-  uint32_t path_to_root = check->GetBitstringPathToRoot();
-  uint32_t mask = check->GetBitstringMask();
-  DCHECK(IsPowerOfTwo(mask + 1));
-  size_t mask_bits = WhichPowerOf2(mask + 1);
-
-  if ((false) && mask_bits == 16u) {
-    // FIXME: cmpw() erroneously emits the constant as 32 bits instead of 16 bits. b/71853552
-    // Compare the bitstring in memory.
-    __ cmpw(Address(temp, mirror::Class::StatusOffset()), Immediate(path_to_root));
-  } else {
-    // /* uint32_t */ temp = temp->status_
-    __ movl(temp, Address(temp, mirror::Class::StatusOffset()));
-    // Compare the bitstring bits using SUB.
-    __ subl(temp, Immediate(path_to_root));
-    // Shift out bits that do not contribute to the comparison.
-    __ shll(temp, Immediate(32u - mask_bits));
-  }
-}
-
 HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
     HLoadClass::LoadKind desired_class_load_kind) {
   switch (desired_class_load_kind) {
@@ -5833,8 +5812,6 @@
     case TypeCheckKind::kInterfaceCheck:
       call_kind = LocationSummary::kCallOnSlowPath;
       break;
-    case TypeCheckKind::kBitstringCheck:
-      break;
   }
 
   LocationSummary* locations =
@@ -5843,13 +5820,7 @@
     locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
   }
   locations->SetInAt(0, Location::RequiresRegister());
-  if (type_check_kind == TypeCheckKind::kBitstringCheck) {
-    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
-    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
-    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
-  } else {
-    locations->SetInAt(1, Location::Any());
-  }
+  locations->SetInAt(1, Location::Any());
   // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
   locations->SetOut(Location::RequiresRegister());
   // When read barriers are enabled, we need a temporary register for
@@ -6078,27 +6049,6 @@
       }
       break;
     }
-
-    case TypeCheckKind::kBitstringCheck: {
-      // /* HeapReference<Class> */ temp = obj->klass_
-      GenerateReferenceLoadTwoRegisters(instruction,
-                                        out_loc,
-                                        obj_loc,
-                                        class_offset,
-                                        kWithoutReadBarrier);
-
-      GenerateBitstringTypeCheckCompare(instruction, out);
-      if (zero.IsLinked()) {
-        __ j(kNotEqual, &zero);
-        __ movl(out, Immediate(1));
-        __ jmp(&done);
-      } else {
-        __ setcc(kEqual, out);
-        // setcc only sets the low byte.
-        __ andl(out, Immediate(1));
-      }
-      break;
-    }
   }
 
   if (zero.IsLinked()) {
@@ -6125,10 +6075,6 @@
     // Require a register for the interface check since there is a loop that compares the class to
     // a memory address.
     locations->SetInAt(1, Location::RequiresRegister());
-  } else if (type_check_kind == TypeCheckKind::kBitstringCheck) {
-    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
-    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
-    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
   } else {
     locations->SetInAt(1, Location::Any());
   }
@@ -6315,7 +6261,7 @@
       break;
     }
 
-    case TypeCheckKind::kInterfaceCheck: {
+    case TypeCheckKind::kInterfaceCheck:
       // Fast path for the interface check. Try to avoid read barriers to improve the fast path.
       // We can not get false positives by doing this.
       // /* HeapReference<Class> */ temp = obj->klass_
@@ -6351,20 +6297,6 @@
       // If `cls` was poisoned above, unpoison it.
       __ MaybeUnpoisonHeapReference(cls.AsRegister<CpuRegister>());
       break;
-    }
-
-    case TypeCheckKind::kBitstringCheck: {
-      // /* HeapReference<Class> */ temp = obj->klass_
-      GenerateReferenceLoadTwoRegisters(instruction,
-                                        temp_loc,
-                                        obj_loc,
-                                        class_offset,
-                                        kWithoutReadBarrier);
-
-      GenerateBitstringTypeCheckCompare(instruction, temp);
-      __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
-      break;
-    }
   }
 
   if (done.IsLinked()) {
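
For reference, the bitstring compare removed above reduces to a subtract-and-shift: a class passes the check when the low `mask_bits` bits of its `status_` word equal `path_to_root`. A minimal standalone sketch of that arithmetic, in plain C++ rather than ART's assembler macros (`BitstringMatches` and its parameters are illustrative names; assumes 0 < mask_bits <= 32):

#include <cstdint>

// Match iff the low `mask_bits` bits of `status` equal `path_to_root`.
// SUB zeroes those bits only on a match; SHL discards the unrelated high bits,
// so the emitted code can simply branch on the zero flag afterwards.
bool BitstringMatches(uint32_t status, uint32_t path_to_root, uint32_t mask_bits) {
  uint32_t diff = status - path_to_root;         // __ subl(temp, Immediate(path_to_root))
  uint32_t shifted = diff << (32u - mask_bits);  // __ shll(temp, Immediate(32u - mask_bits))
  return shifted == 0u;                          // kEqual: match; kNotEqual: fail / slow path
}
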
diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h
index 97f8ec7..e86123e 100644
--- a/compiler/optimizing/code_generator_x86_64.h
+++ b/compiler/optimizing/code_generator_x86_64.h
@@ -208,7 +208,6 @@
   // the suspend call.
   void GenerateSuspendCheck(HSuspendCheck* instruction, HBasicBlock* successor);
   void GenerateClassInitializationCheck(SlowPathCode* slow_path, CpuRegister class_reg);
-  void GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check, CpuRegister temp);
   void HandleBitwiseOperation(HBinaryOperation* operation);
   void GenerateRemFP(HRem* rem);
   void DivRemOneOrMinusOne(HBinaryOperation* instruction);
diff --git a/compiler/optimizing/graph_checker.cc b/compiler/optimizing/graph_checker.cc
index fbcbe36..c88baa8 100644
--- a/compiler/optimizing/graph_checker.cc
+++ b/compiler/optimizing/graph_checker.cc
@@ -25,11 +25,6 @@
 #include "base/bit_vector-inl.h"
 #include "base/scoped_arena_allocator.h"
 #include "base/scoped_arena_containers.h"
-#include "handle.h"
-#include "mirror/class.h"
-#include "obj_ptr-inl.h"
-#include "scoped_thread_state_change-inl.h"
-#include "subtype_check.h"
 
 namespace art {
 
@@ -553,83 +548,28 @@
   }
 }
 
-void GraphChecker::CheckTypeCheckBitstringInput(HTypeCheckInstruction* check,
-                                                size_t input_pos,
-                                                bool check_value,
-                                                uint32_t expected_value,
-                                                const char* name) {
-  if (!check->InputAt(input_pos)->IsIntConstant()) {
-    AddError(StringPrintf("%s:%d (bitstring) expects a HIntConstant input %zu (%s), not %s:%d.",
-                          check->DebugName(),
-                          check->GetId(),
-                          input_pos,
-                          name,
-                          check->InputAt(2)->DebugName(),
-                          check->InputAt(2)->GetId()));
-  } else if (check_value) {
-    uint32_t actual_value =
-        static_cast<uint32_t>(check->InputAt(input_pos)->AsIntConstant()->GetValue());
-    if (actual_value != expected_value) {
-      AddError(StringPrintf("%s:%d (bitstring) has %s 0x%x, not 0x%x as expected.",
-                            check->DebugName(),
-                            check->GetId(),
-                            name,
-                            actual_value,
-                            expected_value));
-    }
-  }
-}
-
-void GraphChecker::HandleTypeCheckInstruction(HTypeCheckInstruction* check) {
+void GraphChecker::VisitCheckCast(HCheckCast* check) {
   VisitInstruction(check);
   HInstruction* input = check->InputAt(1);
-  if (check->GetTypeCheckKind() == TypeCheckKind::kBitstringCheck) {
-    if (!input->IsNullConstant()) {
-      AddError(StringPrintf("%s:%d (bitstring) expects a HNullConstant as second input, not %s:%d.",
-                            check->DebugName(),
-                            check->GetId(),
-                            input->DebugName(),
-                            input->GetId()));
-    }
-    bool check_values = false;
-    BitString::StorageType expected_path_to_root = 0u;
-    BitString::StorageType expected_mask = 0u;
-    {
-      ScopedObjectAccess soa(Thread::Current());
-      ObjPtr<mirror::Class> klass = check->GetClass().Get();
-      MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
-      SubtypeCheckInfo::State state = SubtypeCheck<ObjPtr<mirror::Class>>::GetState(klass);
-      if (state == SubtypeCheckInfo::kAssigned) {
-        expected_path_to_root =
-            SubtypeCheck<ObjPtr<mirror::Class>>::GetEncodedPathToRootForTarget(klass);
-        expected_mask = SubtypeCheck<ObjPtr<mirror::Class>>::GetEncodedPathToRootMask(klass);
-        check_values = true;
-      } else {
-        AddError(StringPrintf("%s:%d (bitstring) references a class with unassigned bitstring.",
-                              check->DebugName(),
-                              check->GetId()));
-      }
-    }
-    CheckTypeCheckBitstringInput(
-        check, /* input_pos */ 2, check_values, expected_path_to_root, "path_to_root");
-    CheckTypeCheckBitstringInput(check, /* input_pos */ 3, check_values, expected_mask, "mask");
-  } else {
-    if (!input->IsLoadClass()) {
-      AddError(StringPrintf("%s:%d (classic) expects a HLoadClass as second input, not %s:%d.",
-                            check->DebugName(),
-                            check->GetId(),
-                            input->DebugName(),
-                            input->GetId()));
-    }
+  if (!input->IsLoadClass()) {
+    AddError(StringPrintf("%s:%d expects a HLoadClass as second input, not %s:%d.",
+                          check->DebugName(),
+                          check->GetId(),
+                          input->DebugName(),
+                          input->GetId()));
   }
 }
 
-void GraphChecker::VisitCheckCast(HCheckCast* check) {
-  HandleTypeCheckInstruction(check);
-}
-
 void GraphChecker::VisitInstanceOf(HInstanceOf* instruction) {
-  HandleTypeCheckInstruction(instruction);
+  VisitInstruction(instruction);
+  HInstruction* input = instruction->InputAt(1);
+  if (!input->IsLoadClass()) {
+    AddError(StringPrintf("%s:%d expects a HLoadClass as second input, not %s:%d.",
+                          instruction->DebugName(),
+                          instruction->GetId(),
+                          input->DebugName(),
+                          input->GetId()));
+  }
 }
 
 void GraphChecker::HandleLoop(HBasicBlock* loop_header) {
diff --git a/compiler/optimizing/graph_checker.h b/compiler/optimizing/graph_checker.h
index dbedc40..0f0b49d 100644
--- a/compiler/optimizing/graph_checker.h
+++ b/compiler/optimizing/graph_checker.h
@@ -71,12 +71,6 @@
   void VisitTryBoundary(HTryBoundary* try_boundary) OVERRIDE;
   void VisitTypeConversion(HTypeConversion* instruction) OVERRIDE;
 
-  void CheckTypeCheckBitstringInput(HTypeCheckInstruction* check,
-                                    size_t input_pos,
-                                    bool check_value,
-                                    uint32_t expected_value,
-                                    const char* name);
-  void HandleTypeCheckInstruction(HTypeCheckInstruction* instruction);
   void HandleLoop(HBasicBlock* loop_header);
   void HandleBooleanInput(HInstruction* instruction, size_t input_index);
 
diff --git a/compiler/optimizing/graph_visualizer.cc b/compiler/optimizing/graph_visualizer.cc
index 5519121..12c6988 100644
--- a/compiler/optimizing/graph_visualizer.cc
+++ b/compiler/optimizing/graph_visualizer.cc
@@ -389,23 +389,16 @@
     StartAttributeStream("load_kind") << load_string->GetLoadKind();
   }
 
-  void HandleTypeCheckInstruction(HTypeCheckInstruction* check) {
-    StartAttributeStream("check_kind") << check->GetTypeCheckKind();
-    StartAttributeStream("must_do_null_check") << std::boolalpha
-        << check->MustDoNullCheck() << std::noboolalpha;
-    if (check->GetTypeCheckKind() == TypeCheckKind::kBitstringCheck) {
-      StartAttributeStream("path_to_root") << std::hex
-          << "0x" << check->GetBitstringPathToRoot() << std::dec;
-      StartAttributeStream("mask") << std::hex << "0x" << check->GetBitstringMask() << std::dec;
-    }
-  }
-
   void VisitCheckCast(HCheckCast* check_cast) OVERRIDE {
-    HandleTypeCheckInstruction(check_cast);
+    StartAttributeStream("check_kind") << check_cast->GetTypeCheckKind();
+    StartAttributeStream("must_do_null_check") << std::boolalpha
+        << check_cast->MustDoNullCheck() << std::noboolalpha;
   }
 
   void VisitInstanceOf(HInstanceOf* instance_of) OVERRIDE {
-    HandleTypeCheckInstruction(instance_of);
+    StartAttributeStream("check_kind") << instance_of->GetTypeCheckKind();
+    StartAttributeStream("must_do_null_check") << std::boolalpha
+        << instance_of->MustDoNullCheck() << std::noboolalpha;
   }
 
   void VisitArrayLength(HArrayLength* array_length) OVERRIDE {
@@ -655,32 +648,20 @@
           << std::boolalpha << loop_info->IsIrreducible() << std::noboolalpha;
     }
 
-    // For the builder and the inliner, we want to add extra information on HInstructions
-    // that have reference types, and also HInstanceOf/HCheckcast.
     if ((IsPass(HGraphBuilder::kBuilderPassName)
         || IsPass(HInliner::kInlinerPassName))
-        && (instruction->GetType() == DataType::Type::kReference ||
-            instruction->IsInstanceOf() ||
-            instruction->IsCheckCast())) {
-      ReferenceTypeInfo info = (instruction->GetType() == DataType::Type::kReference)
-          ? instruction->IsLoadClass()
-              ? instruction->AsLoadClass()->GetLoadedClassRTI()
-              : instruction->GetReferenceTypeInfo()
-          : instruction->IsInstanceOf()
-              ? instruction->AsInstanceOf()->GetTargetClassRTI()
-              : instruction->AsCheckCast()->GetTargetClassRTI();
+        && (instruction->GetType() == DataType::Type::kReference)) {
+      ReferenceTypeInfo info = instruction->IsLoadClass()
+        ? instruction->AsLoadClass()->GetLoadedClassRTI()
+        : instruction->GetReferenceTypeInfo();
       ScopedObjectAccess soa(Thread::Current());
       if (info.IsValid()) {
         StartAttributeStream("klass")
             << mirror::Class::PrettyDescriptor(info.GetTypeHandle().Get());
-        if (instruction->GetType() == DataType::Type::kReference) {
-          StartAttributeStream("can_be_null")
-              << std::boolalpha << instruction->CanBeNull() << std::noboolalpha;
-        }
+        StartAttributeStream("can_be_null")
+            << std::boolalpha << instruction->CanBeNull() << std::noboolalpha;
         StartAttributeStream("exact") << std::boolalpha << info.IsExact() << std::noboolalpha;
-      } else if (instruction->IsLoadClass() ||
-                 instruction->IsInstanceOf() ||
-                 instruction->IsCheckCast()) {
+      } else if (instruction->IsLoadClass()) {
         StartAttributeStream("klass") << "unresolved";
       } else {
         // The NullConstant may be added to the graph during other passes that happen between
diff --git a/compiler/optimizing/instruction_builder.cc b/compiler/optimizing/instruction_builder.cc
index 0205c6a..64a1ecc 100644
--- a/compiler/optimizing/instruction_builder.cc
+++ b/compiler/optimizing/instruction_builder.cc
@@ -1811,6 +1811,29 @@
   }
 }
 
+static TypeCheckKind ComputeTypeCheckKind(Handle<mirror::Class> cls)
+    REQUIRES_SHARED(Locks::mutator_lock_) {
+  if (cls == nullptr) {
+    return TypeCheckKind::kUnresolvedCheck;
+  } else if (cls->IsInterface()) {
+    return TypeCheckKind::kInterfaceCheck;
+  } else if (cls->IsArrayClass()) {
+    if (cls->GetComponentType()->IsObjectClass()) {
+      return TypeCheckKind::kArrayObjectCheck;
+    } else if (cls->CannotBeAssignedFromOtherTypes()) {
+      return TypeCheckKind::kExactCheck;
+    } else {
+      return TypeCheckKind::kArrayCheck;
+    }
+  } else if (cls->IsFinal()) {
+    return TypeCheckKind::kExactCheck;
+  } else if (cls->IsAbstract()) {
+    return TypeCheckKind::kAbstractClassCheck;
+  } else {
+    return TypeCheckKind::kClassHierarchyCheck;
+  }
+}
+
 void HInstructionBuilder::BuildLoadString(dex::StringIndex string_index, uint32_t dex_pc) {
   HLoadString* load_string =
       new (allocator_) HLoadString(graph_->GetCurrentMethod(), string_index, *dex_file_, dex_pc);
@@ -1825,8 +1848,22 @@
 HLoadClass* HInstructionBuilder::BuildLoadClass(dex::TypeIndex type_index, uint32_t dex_pc) {
   ScopedObjectAccess soa(Thread::Current());
   const DexFile& dex_file = *dex_compilation_unit_->GetDexFile();
-  Handle<mirror::Class> klass = ResolveClass(soa, type_index);
-  bool needs_access_check = LoadClassNeedsAccessCheck(klass);
+  Handle<mirror::ClassLoader> class_loader = dex_compilation_unit_->GetClassLoader();
+  Handle<mirror::Class> klass = handles_->NewHandle(compiler_driver_->ResolveClass(
+      soa, dex_compilation_unit_->GetDexCache(), class_loader, type_index, dex_compilation_unit_));
+
+  bool needs_access_check = true;
+  if (klass != nullptr) {
+    if (klass->IsPublic()) {
+      needs_access_check = false;
+    } else {
+      ObjPtr<mirror::Class> compiling_class = GetCompilingClass();
+      if (compiling_class != nullptr && compiling_class->CanAccess(klass.Get())) {
+        needs_access_check = false;
+      }
+    }
+  }
+
   return BuildLoadClass(type_index, dex_file, klass, dex_pc, needs_access_check);
 }
 
@@ -1871,83 +1908,25 @@
   return load_class;
 }
 
-Handle<mirror::Class> HInstructionBuilder::ResolveClass(ScopedObjectAccess& soa,
-                                                        dex::TypeIndex type_index) {
-  Handle<mirror::ClassLoader> class_loader = dex_compilation_unit_->GetClassLoader();
-  ObjPtr<mirror::Class> klass = compiler_driver_->ResolveClass(
-      soa, dex_compilation_unit_->GetDexCache(), class_loader, type_index, dex_compilation_unit_);
-  // TODO: Avoid creating excessive handles if the method references the same class repeatedly.
-  // (Use a map on the local_allocator_.)
-  return handles_->NewHandle(klass);
-}
-
-bool HInstructionBuilder::LoadClassNeedsAccessCheck(Handle<mirror::Class> klass) {
-  if (klass == nullptr) {
-    return true;
-  } else if (klass->IsPublic()) {
-    return false;
-  } else {
-    ObjPtr<mirror::Class> compiling_class = GetCompilingClass();
-    return compiling_class == nullptr || !compiling_class->CanAccess(klass.Get());
-  }
-}
-
 void HInstructionBuilder::BuildTypeCheck(const Instruction& instruction,
                                          uint8_t destination,
                                          uint8_t reference,
                                          dex::TypeIndex type_index,
                                          uint32_t dex_pc) {
   HInstruction* object = LoadLocal(reference, DataType::Type::kReference);
+  HLoadClass* cls = BuildLoadClass(type_index, dex_pc);
 
   ScopedObjectAccess soa(Thread::Current());
-  const DexFile& dex_file = *dex_compilation_unit_->GetDexFile();
-  Handle<mirror::Class> klass = ResolveClass(soa, type_index);
-  bool needs_access_check = LoadClassNeedsAccessCheck(klass);
-  TypeCheckKind check_kind = HSharpening::ComputeTypeCheckKind(
-      klass.Get(), code_generator_, compiler_driver_, needs_access_check);
-
-  HInstruction* class_or_null = nullptr;
-  HIntConstant* bitstring_path_to_root = nullptr;
-  HIntConstant* bitstring_mask = nullptr;
-  if (check_kind == TypeCheckKind::kBitstringCheck) {
-    // TODO: Allow using the bitstring check also if we need an access check.
-    DCHECK(!needs_access_check);
-    class_or_null = graph_->GetNullConstant(dex_pc);
-    MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
-    uint32_t path_to_root =
-        SubtypeCheck<ObjPtr<mirror::Class>>::GetEncodedPathToRootForTarget(klass.Get());
-    uint32_t mask = SubtypeCheck<ObjPtr<mirror::Class>>::GetEncodedPathToRootMask(klass.Get());
-    bitstring_path_to_root = graph_->GetIntConstant(static_cast<int32_t>(path_to_root), dex_pc);
-    bitstring_mask = graph_->GetIntConstant(static_cast<int32_t>(mask), dex_pc);
-  } else {
-    class_or_null = BuildLoadClass(type_index, dex_file, klass, dex_pc, needs_access_check);
-  }
-  DCHECK(class_or_null != nullptr);
-
+  TypeCheckKind check_kind = ComputeTypeCheckKind(cls->GetClass());
   if (instruction.Opcode() == Instruction::INSTANCE_OF) {
-    AppendInstruction(new (allocator_) HInstanceOf(object,
-                                                   class_or_null,
-                                                   check_kind,
-                                                   klass,
-                                                   dex_pc,
-                                                   allocator_,
-                                                   bitstring_path_to_root,
-                                                   bitstring_mask));
+    AppendInstruction(new (allocator_) HInstanceOf(object, cls, check_kind, dex_pc));
     UpdateLocal(destination, current_block_->GetLastInstruction());
   } else {
     DCHECK_EQ(instruction.Opcode(), Instruction::CHECK_CAST);
     // We emit a CheckCast followed by a BoundType. CheckCast is a statement
     // which may throw. If it succeeds BoundType sets the new type of `object`
     // for all subsequent uses.
-    AppendInstruction(
-        new (allocator_) HCheckCast(object,
-                                    class_or_null,
-                                    check_kind,
-                                    klass,
-                                    dex_pc,
-                                    allocator_,
-                                    bitstring_path_to_root,
-                                    bitstring_mask));
+    AppendInstruction(new (allocator_) HCheckCast(object, cls, check_kind, dex_pc));
     AppendInstruction(new (allocator_) HBoundType(object, dex_pc));
     UpdateLocal(reference, current_block_->GetLastInstruction());
   }
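
The ComputeTypeCheckKind helper introduced above encodes a strict priority order: unresolved first, then interface, then the array shapes, then final, then abstract, with a class-hierarchy walk as the fallback. A small restatement over stand-in types, illustration only (`ClassInfo` and `Classify` are made-up names, not ART API), makes that ordering explicit:

enum class Kind {
  kUnresolved, kInterface, kArrayObject, kExact, kArray, kAbstractClass, kClassHierarchy
};

// Stand-in for the mirror::Class predicates the helper consults.
struct ClassInfo {
  bool resolved;
  bool is_interface;
  bool is_array;
  bool component_is_object;
  bool cannot_be_assigned_from_other_types;
  bool is_final;
  bool is_abstract;
};

Kind Classify(const ClassInfo& c) {
  if (!c.resolved) return Kind::kUnresolved;
  if (c.is_interface) return Kind::kInterface;  // interfaces take precedence over final/abstract
  if (c.is_array) {
    if (c.component_is_object) return Kind::kArrayObject;
    return c.cannot_be_assigned_from_other_types ? Kind::kExact : Kind::kArray;
  }
  if (c.is_final) return Kind::kExact;          // a final class only needs an exact compare
  if (c.is_abstract) return Kind::kAbstractClass;
  return Kind::kClassHierarchy;
}
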
diff --git a/compiler/optimizing/instruction_builder.h b/compiler/optimizing/instruction_builder.h
index f788292..4428c53 100644
--- a/compiler/optimizing/instruction_builder.h
+++ b/compiler/optimizing/instruction_builder.h
@@ -39,7 +39,6 @@
 class HBasicBlockBuilder;
 class Instruction;
 class OptimizingCompilerStats;
-class ScopedObjectAccess;
 class SsaBuilder;
 class VariableSizedHandleScope;
 
@@ -233,12 +232,6 @@
                              bool needs_access_check)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  Handle<mirror::Class> ResolveClass(ScopedObjectAccess& soa, dex::TypeIndex type_index)
-      REQUIRES_SHARED(Locks::mutator_lock_);
-
-  bool LoadClassNeedsAccessCheck(Handle<mirror::Class> klass)
-      REQUIRES_SHARED(Locks::mutator_lock_);
-
   // Returns the outer-most compiling method's class.
   ObjPtr<mirror::Class> GetOutermostCompilingClass() const;
 
diff --git a/compiler/optimizing/instruction_simplifier.cc b/compiler/optimizing/instruction_simplifier.cc
index 2538fa3..a42a85d 100644
--- a/compiler/optimizing/instruction_simplifier.cc
+++ b/compiler/optimizing/instruction_simplifier.cc
@@ -576,9 +576,7 @@
 
 // Returns whether doing a type test between the class of `object` against `klass` has
 // a statically known outcome. The result of the test is stored in `outcome`.
-static bool TypeCheckHasKnownOutcome(ReferenceTypeInfo class_rti,
-                                     HInstruction* object,
-                                     /*out*/bool* outcome) {
+static bool TypeCheckHasKnownOutcome(HLoadClass* klass, HInstruction* object, bool* outcome) {
   DCHECK(!object->IsNullConstant()) << "Null constants should be special cased";
   ReferenceTypeInfo obj_rti = object->GetReferenceTypeInfo();
   ScopedObjectAccess soa(Thread::Current());
@@ -588,6 +586,7 @@
     return false;
   }
 
+  ReferenceTypeInfo class_rti = klass->GetLoadedClassRTI();
   if (!class_rti.IsValid()) {
     // Happens when the loaded class is unresolved.
     return false;
@@ -612,8 +611,8 @@
 
 void InstructionSimplifierVisitor::VisitCheckCast(HCheckCast* check_cast) {
   HInstruction* object = check_cast->InputAt(0);
-  if (check_cast->GetTypeCheckKind() != TypeCheckKind::kBitstringCheck &&
-      check_cast->GetTargetClass()->NeedsAccessCheck()) {
+  HLoadClass* load_class = check_cast->InputAt(1)->AsLoadClass();
+  if (load_class->NeedsAccessCheck()) {
     // If we need to perform an access check we cannot remove the instruction.
     return;
   }
@@ -631,18 +630,15 @@
   // Note: The `outcome` is initialized to please valgrind - the compiler can reorder
   // the return value check with the `outcome` check, b/27651442 .
   bool outcome = false;
-  if (TypeCheckHasKnownOutcome(check_cast->GetTargetClassRTI(), object, &outcome)) {
+  if (TypeCheckHasKnownOutcome(load_class, object, &outcome)) {
     if (outcome) {
       check_cast->GetBlock()->RemoveInstruction(check_cast);
       MaybeRecordStat(stats_, MethodCompilationStat::kRemovedCheckedCast);
-      if (check_cast->GetTypeCheckKind() != TypeCheckKind::kBitstringCheck) {
-        HLoadClass* load_class = check_cast->GetTargetClass();
-        if (!load_class->HasUses()) {
-          // We cannot rely on DCE to remove the class because the `HLoadClass` thinks it can throw.
-          // However, here we know that it cannot because the checkcast was successful, hence
-          // the class was already loaded.
-          load_class->GetBlock()->RemoveInstruction(load_class);
-        }
+      if (!load_class->HasUses()) {
+        // We cannot rely on DCE to remove the class because the `HLoadClass` thinks it can throw.
+        // However, here we know that it cannot because the checkcast was successful, hence
+        // the class was already loaded.
+        load_class->GetBlock()->RemoveInstruction(load_class);
       }
     } else {
       // Don't do anything for exceptional cases for now. Ideally we should remove
@@ -653,8 +649,8 @@
 
 void InstructionSimplifierVisitor::VisitInstanceOf(HInstanceOf* instruction) {
   HInstruction* object = instruction->InputAt(0);
-  if (instruction->GetTypeCheckKind() != TypeCheckKind::kBitstringCheck &&
-      instruction->GetTargetClass()->NeedsAccessCheck()) {
+  HLoadClass* load_class = instruction->InputAt(1)->AsLoadClass();
+  if (load_class->NeedsAccessCheck()) {
     // If we need to perform an access check we cannot remove the instruction.
     return;
   }
@@ -677,7 +673,7 @@
   // Note: The `outcome` is initialized to please valgrind - the compiler can reorder
   // the return value check with the `outcome` check, b/27651442 .
   bool outcome = false;
-  if (TypeCheckHasKnownOutcome(instruction->GetTargetClassRTI(), object, &outcome)) {
+  if (TypeCheckHasKnownOutcome(load_class, object, &outcome)) {
     MaybeRecordStat(stats_, MethodCompilationStat::kRemovedInstanceOf);
     if (outcome && can_be_null) {
       // Type test will succeed, we just need a null test.
@@ -690,14 +686,11 @@
     }
     RecordSimplification();
     instruction->GetBlock()->RemoveInstruction(instruction);
-    if (outcome && instruction->GetTypeCheckKind() != TypeCheckKind::kBitstringCheck) {
-      HLoadClass* load_class = instruction->GetTargetClass();
-      if (!load_class->HasUses()) {
-        // We cannot rely on DCE to remove the class because the `HLoadClass` thinks it can throw.
-        // However, here we know that it cannot because the instanceof check was successful, hence
-        // the class was already loaded.
-        load_class->GetBlock()->RemoveInstruction(load_class);
-      }
+    if (outcome && !load_class->HasUses()) {
+      // We cannot rely on DCE to remove the class because the `HLoadClass` thinks it can throw.
+      // However, here we know that it cannot because the instanceof check was successful, hence
+      // the class was already loaded.
+      load_class->GetBlock()->RemoveInstruction(load_class);
     }
   }
 }
diff --git a/compiler/optimizing/nodes.cc b/compiler/optimizing/nodes.cc
index 5587f87..91e475d 100644
--- a/compiler/optimizing/nodes.cc
+++ b/compiler/optimizing/nodes.cc
@@ -3105,8 +3105,6 @@
       return os << "array_object_check";
     case TypeCheckKind::kArrayCheck:
       return os << "array_check";
-    case TypeCheckKind::kBitstringCheck:
-      return os << "bitstring_check";
     default:
       LOG(FATAL) << "Unknown TypeCheckKind: " << static_cast<int>(rhs);
       UNREACHABLE();
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index a9782a6..43ca2cf 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -5951,7 +5951,8 @@
         special_input_(HUserRecord<HInstruction*>(current_method)),
         type_index_(type_index),
         dex_file_(dex_file),
-        klass_(klass) {
+        klass_(klass),
+        loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) {
     // Referrers class should not need access check. We never inline unverified
     // methods so we can't possibly end up in this situation.
     DCHECK(!is_referrers_class || !needs_access_check);
@@ -5961,7 +5962,6 @@
     SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
     SetPackedFlag<kFlagIsInBootImage>(false);
     SetPackedFlag<kFlagGenerateClInitCheck>(false);
-    SetPackedFlag<kFlagValidLoadedClassRTI>(false);
   }
 
   bool IsClonable() const OVERRIDE { return true; }
@@ -6010,18 +6010,13 @@
   }
 
   ReferenceTypeInfo GetLoadedClassRTI() {
-    if (GetPackedFlag<kFlagValidLoadedClassRTI>()) {
-      // Note: The is_exact flag from the return value should not be used.
-      return ReferenceTypeInfo::CreateUnchecked(klass_, /* is_exact */ true);
-    } else {
-      return ReferenceTypeInfo::CreateInvalid();
-    }
+    return loaded_class_rti_;
   }
 
-  // Loaded class RTI is marked as valid by RTP if the klass_ is admissible.
-  void SetValidLoadedClassRTI() REQUIRES_SHARED(Locks::mutator_lock_) {
-    DCHECK(klass_ != nullptr);
-    SetPackedFlag<kFlagValidLoadedClassRTI>(true);
+  void SetLoadedClassRTI(ReferenceTypeInfo rti) {
+    // Make sure we only set exact types (the loaded class should never be merged).
+    DCHECK(rti.IsExact());
+    loaded_class_rti_ = rti;
   }
 
   dex::TypeIndex GetTypeIndex() const { return type_index_; }
@@ -6074,8 +6069,7 @@
   static constexpr size_t kFieldLoadKind           = kFlagGenerateClInitCheck + 1;
   static constexpr size_t kFieldLoadKindSize =
       MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
-  static constexpr size_t kFlagValidLoadedClassRTI = kFieldLoadKind + kFieldLoadKindSize;
-  static constexpr size_t kNumberOfLoadClassPackedBits = kFlagValidLoadedClassRTI + 1;
+  static constexpr size_t kNumberOfLoadClassPackedBits = kFieldLoadKind + kFieldLoadKindSize;
   static_assert(kNumberOfLoadClassPackedBits < kMaxNumberOfPackedBits, "Too many packed fields.");
   using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
 
@@ -6103,6 +6097,8 @@
   const DexFile& dex_file_;
 
   Handle<mirror::Class> klass_;
+
+  ReferenceTypeInfo loaded_class_rti_;
 };
 std::ostream& operator<<(std::ostream& os, HLoadClass::LoadKind rhs);
 
@@ -6630,143 +6626,49 @@
   kInterfaceCheck,        // No optimization yet when checking against an interface.
   kArrayObjectCheck,      // Can just check if the array is not primitive.
   kArrayCheck,            // No optimization yet when checking against a generic array.
-  kBitstringCheck,        // Compare the type check bitstring.
   kLast = kArrayCheck
 };
 
 std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs);
 
-// Note: HTypeCheckInstruction is just a helper class, not an abstract instruction with an
-// `IsTypeCheckInstruction()`. (New virtual methods in the HInstruction class have a high cost.)
-class HTypeCheckInstruction : public HVariableInputSizeInstruction {
+class HInstanceOf FINAL : public HExpression<2> {
  public:
-  HTypeCheckInstruction(HInstruction* object,
-                        HInstruction* target_class_or_null,
-                        TypeCheckKind check_kind,
-                        Handle<mirror::Class> klass,
-                        uint32_t dex_pc,
-                        ArenaAllocator* allocator,
-                        HIntConstant* bitstring_path_to_root,
-                        HIntConstant* bitstring_mask,
-                        SideEffects side_effects)
-      : HVariableInputSizeInstruction(
-          side_effects,
-          dex_pc,
-          allocator,
-          /* number_of_inputs */ check_kind == TypeCheckKind::kBitstringCheck ? 4u : 2u,
-          kArenaAllocTypeCheckInputs),
-        klass_(klass) {
+  HInstanceOf(HInstruction* object,
+              HLoadClass* target_class,
+              TypeCheckKind check_kind,
+              uint32_t dex_pc)
+      : HExpression(DataType::Type::kBool,
+                    SideEffectsForArchRuntimeCalls(check_kind),
+                    dex_pc) {
     SetPackedField<TypeCheckKindField>(check_kind);
     SetPackedFlag<kFlagMustDoNullCheck>(true);
-    SetPackedFlag<kFlagValidTargetClassRTI>(false);
     SetRawInputAt(0, object);
-    SetRawInputAt(1, target_class_or_null);
-    DCHECK_EQ(check_kind == TypeCheckKind::kBitstringCheck, bitstring_path_to_root != nullptr);
-    DCHECK_EQ(check_kind == TypeCheckKind::kBitstringCheck, bitstring_mask != nullptr);
-    if (check_kind == TypeCheckKind::kBitstringCheck) {
-      DCHECK(target_class_or_null->IsNullConstant());
-      SetRawInputAt(2, bitstring_path_to_root);
-      SetRawInputAt(3, bitstring_mask);
-    } else {
-      DCHECK(target_class_or_null->IsLoadClass());
-    }
+    SetRawInputAt(1, target_class);
   }
 
   HLoadClass* GetTargetClass() const {
-    DCHECK_NE(GetTypeCheckKind(), TypeCheckKind::kBitstringCheck);
     HInstruction* load_class = InputAt(1);
     DCHECK(load_class->IsLoadClass());
     return load_class->AsLoadClass();
   }
 
-  uint32_t GetBitstringPathToRoot() const {
-    DCHECK_EQ(GetTypeCheckKind(), TypeCheckKind::kBitstringCheck);
-    HInstruction* path_to_root = InputAt(2);
-    DCHECK(path_to_root->IsIntConstant());
-    return static_cast<uint32_t>(path_to_root->AsIntConstant()->GetValue());
-  }
-
-  uint32_t GetBitstringMask() const {
-    DCHECK_EQ(GetTypeCheckKind(), TypeCheckKind::kBitstringCheck);
-    HInstruction* mask = InputAt(3);
-    DCHECK(mask->IsIntConstant());
-    return static_cast<uint32_t>(mask->AsIntConstant()->GetValue());
-  }
-
   bool IsClonable() const OVERRIDE { return true; }
   bool CanBeMoved() const OVERRIDE { return true; }
 
-  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
-    DCHECK(other->IsInstanceOf() || other->IsCheckCast()) << other->DebugName();
-    return GetPackedFields() == down_cast<const HTypeCheckInstruction*>(other)->GetPackedFields();
+  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
+    return true;
   }
 
-  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
-  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
-  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
-  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }
-
-  ReferenceTypeInfo GetTargetClassRTI() {
-    if (GetPackedFlag<kFlagValidTargetClassRTI>()) {
-      // Note: The is_exact flag from the return value should not be used.
-      return ReferenceTypeInfo::CreateUnchecked(klass_, /* is_exact */ true);
-    } else {
-      return ReferenceTypeInfo::CreateInvalid();
-    }
-  }
-
-  // Target class RTI is marked as valid by RTP if the klass_ is admissible.
-  void SetValidTargetClassRTI() REQUIRES_SHARED(Locks::mutator_lock_) {
-    DCHECK(klass_ != nullptr);
-    SetPackedFlag<kFlagValidTargetClassRTI>(true);
-  }
-
-  Handle<mirror::Class> GetClass() const {
-    return klass_;
-  }
-
- protected:
-  DEFAULT_COPY_CONSTRUCTOR(TypeCheckInstruction);
-
- private:
-  static constexpr size_t kFieldTypeCheckKind = kNumberOfGenericPackedBits;
-  static constexpr size_t kFieldTypeCheckKindSize =
-      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
-  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
-  static constexpr size_t kFlagValidTargetClassRTI = kFlagMustDoNullCheck + 1;
-  static constexpr size_t kNumberOfInstanceOfPackedBits = kFlagValidTargetClassRTI + 1;
-  static_assert(kNumberOfInstanceOfPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
-  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;
-
-  Handle<mirror::Class> klass_;
-};
-
-class HInstanceOf FINAL : public HTypeCheckInstruction {
- public:
-  HInstanceOf(HInstruction* object,
-              HInstruction* target_class_or_null,
-              TypeCheckKind check_kind,
-              Handle<mirror::Class> klass,
-              uint32_t dex_pc,
-              ArenaAllocator* allocator,
-              HIntConstant* bitstring_path_to_root,
-              HIntConstant* bitstring_mask)
-      : HTypeCheckInstruction(object,
-                              target_class_or_null,
-                              check_kind,
-                              klass,
-                              dex_pc,
-                              allocator,
-                              bitstring_path_to_root,
-                              bitstring_mask,
-                              SideEffectsForArchRuntimeCalls(check_kind)) {}
-
-  DataType::Type GetType() const OVERRIDE { return DataType::Type::kBool; }
-
   bool NeedsEnvironment() const OVERRIDE {
     return CanCallRuntime(GetTypeCheckKind());
   }
 
+  // Used only in code generation.
+  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
+  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
+  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
+  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }
+
   static bool CanCallRuntime(TypeCheckKind check_kind) {
     // Mips currently does runtime calls for any other checks.
     return check_kind != TypeCheckKind::kExactCheck;
@@ -6780,6 +6682,15 @@
 
  protected:
   DEFAULT_COPY_CONSTRUCTOR(InstanceOf);
+
+ private:
+  static constexpr size_t kFieldTypeCheckKind = kNumberOfExpressionPackedBits;
+  static constexpr size_t kFieldTypeCheckKindSize =
+      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
+  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
+  static constexpr size_t kNumberOfInstanceOfPackedBits = kFlagMustDoNullCheck + 1;
+  static_assert(kNumberOfInstanceOfPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
+  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;
 };
 
 class HBoundType FINAL : public HExpression<1> {
@@ -6829,25 +6740,31 @@
   ReferenceTypeInfo upper_bound_;
 };
 
-class HCheckCast FINAL : public HTypeCheckInstruction {
+class HCheckCast FINAL : public HTemplateInstruction<2> {
  public:
   HCheckCast(HInstruction* object,
-             HInstruction* target_class_or_null,
+             HLoadClass* target_class,
              TypeCheckKind check_kind,
-             Handle<mirror::Class> klass,
-             uint32_t dex_pc,
-             ArenaAllocator* allocator,
-             HIntConstant* bitstring_path_to_root,
-             HIntConstant* bitstring_mask)
-      : HTypeCheckInstruction(object,
-                              target_class_or_null,
-                              check_kind,
-                              klass,
-                              dex_pc,
-                              allocator,
-                              bitstring_path_to_root,
-                              bitstring_mask,
-                              SideEffects::CanTriggerGC()) {}
+             uint32_t dex_pc)
+      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
+    SetPackedField<TypeCheckKindField>(check_kind);
+    SetPackedFlag<kFlagMustDoNullCheck>(true);
+    SetRawInputAt(0, object);
+    SetRawInputAt(1, target_class);
+  }
+
+  HLoadClass* GetTargetClass() const {
+    HInstruction* load_class = InputAt(1);
+    DCHECK(load_class->IsLoadClass());
+    return load_class->AsLoadClass();
+  }
+
+  bool IsClonable() const OVERRIDE { return true; }
+  bool CanBeMoved() const OVERRIDE { return true; }
+
+  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
+    return true;
+  }
 
   bool NeedsEnvironment() const OVERRIDE {
     // Instruction may throw a CheckCastError.
@@ -6856,10 +6773,24 @@
 
   bool CanThrow() const OVERRIDE { return true; }
 
+  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
+  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
+  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
+  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }
+
   DECLARE_INSTRUCTION(CheckCast);
 
  protected:
   DEFAULT_COPY_CONSTRUCTOR(CheckCast);
+
+ private:
+  static constexpr size_t kFieldTypeCheckKind = kNumberOfGenericPackedBits;
+  static constexpr size_t kFieldTypeCheckKindSize =
+      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
+  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
+  static constexpr size_t kNumberOfCheckCastPackedBits = kFlagMustDoNullCheck + 1;
+  static_assert(kNumberOfCheckCastPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
+  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;
 };
 
 /**
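
With HTypeCheckInstruction gone, HInstanceOf and HCheckCast above each declare their own packed TypeCheckKind field plus a MustDoNullCheck flag, sized via MinimumBitsToStore. A minimal sketch of that packing idea in plain C++ (not ART's BitField machinery; enum ordering and bit layout here are illustrative only):

#include <cstdint>

enum class Kind : uint32_t {
  kUnresolved, kExact, kClassHierarchy, kAbstract, kInterface, kArrayObject, kArray,
  kLast = kArray  // 7 values -> 3 bits, MinimumBitsToStore-style sizing
};
constexpr uint32_t kKindBits = 3;
constexpr uint32_t kKindMask = (1u << kKindBits) - 1u;
constexpr uint32_t kNullCheckShift = kKindBits;  // the flag sits right after the kind field

uint32_t Pack(Kind kind, bool must_do_null_check) {
  return (static_cast<uint32_t>(kind) & kKindMask) |
         (static_cast<uint32_t>(must_do_null_check) << kNullCheckShift);
}
Kind UnpackKind(uint32_t packed) { return static_cast<Kind>(packed & kKindMask); }
bool UnpackMustDoNullCheck(uint32_t packed) { return ((packed >> kNullCheckShift) & 1u) != 0u; }
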
diff --git a/compiler/optimizing/optimizing_compiler_stats.h b/compiler/optimizing/optimizing_compiler_stats.h
index a6a2f46..0023265 100644
--- a/compiler/optimizing/optimizing_compiler_stats.h
+++ b/compiler/optimizing/optimizing_compiler_stats.h
@@ -99,7 +99,6 @@
   kConstructorFenceRemovedLSE,
   kConstructorFenceRemovedPFRA,
   kConstructorFenceRemovedCFRE,
-  kBitstringTypeCheck,
   kJitOutOfMemoryForCommit,
   kLastStat
 };
diff --git a/compiler/optimizing/prepare_for_register_allocation.cc b/compiler/optimizing/prepare_for_register_allocation.cc
index 5973339..f843c00 100644
--- a/compiler/optimizing/prepare_for_register_allocation.cc
+++ b/compiler/optimizing/prepare_for_register_allocation.cc
@@ -34,20 +34,6 @@
   }
 }
 
-void PrepareForRegisterAllocation::VisitCheckCast(HCheckCast* check_cast) {
-  // Record only those bitstring type checks that make it to the codegen stage.
-  if (check_cast->GetTypeCheckKind() == TypeCheckKind::kBitstringCheck) {
-    MaybeRecordStat(stats_, MethodCompilationStat::kBitstringTypeCheck);
-  }
-}
-
-void PrepareForRegisterAllocation::VisitInstanceOf(HInstanceOf* instance_of) {
-  // Record only those bitstring type checks that make it to the codegen stage.
-  if (instance_of->GetTypeCheckKind() == TypeCheckKind::kBitstringCheck) {
-    MaybeRecordStat(stats_, MethodCompilationStat::kBitstringTypeCheck);
-  }
-}
-
 void PrepareForRegisterAllocation::VisitNullCheck(HNullCheck* check) {
   check->ReplaceWith(check->InputAt(0));
 }
diff --git a/compiler/optimizing/prepare_for_register_allocation.h b/compiler/optimizing/prepare_for_register_allocation.h
index f6e4d3e..2c64f01 100644
--- a/compiler/optimizing/prepare_for_register_allocation.h
+++ b/compiler/optimizing/prepare_for_register_allocation.h
@@ -40,8 +40,6 @@
       "prepare_for_register_allocation";
 
  private:
-  void VisitCheckCast(HCheckCast* check_cast) OVERRIDE;
-  void VisitInstanceOf(HInstanceOf* instance_of) OVERRIDE;
   void VisitNullCheck(HNullCheck* check) OVERRIDE;
   void VisitDivZeroCheck(HDivZeroCheck* check) OVERRIDE;
   void VisitBoundsCheck(HBoundsCheck* check) OVERRIDE;
diff --git a/compiler/optimizing/reference_type_propagation.cc b/compiler/optimizing/reference_type_propagation.cc
index 178d7fd..8bb124e 100644
--- a/compiler/optimizing/reference_type_propagation.cc
+++ b/compiler/optimizing/reference_type_propagation.cc
@@ -87,7 +87,6 @@
   void VisitDeoptimize(HDeoptimize* deopt) OVERRIDE;
   void VisitNewInstance(HNewInstance* new_instance) OVERRIDE;
   void VisitLoadClass(HLoadClass* load_class) OVERRIDE;
-  void VisitInstanceOf(HInstanceOf* load_class) OVERRIDE;
   void VisitClinitCheck(HClinitCheck* clinit_check) OVERRIDE;
   void VisitLoadString(HLoadString* instr) OVERRIDE;
   void VisitLoadException(HLoadException* instr) OVERRIDE;
@@ -172,12 +171,6 @@
                 << "NullCheck " << instr->GetReferenceTypeInfo()
                 << "Input(0) " << instr->InputAt(0)->GetReferenceTypeInfo();
           }
-        } else if (instr->IsInstanceOf()) {
-          HInstanceOf* iof = instr->AsInstanceOf();
-          DCHECK(!iof->GetTargetClassRTI().IsValid() || iof->GetTargetClassRTI().IsExact());
-        } else if (instr->IsCheckCast()) {
-          HCheckCast* check = instr->AsCheckCast();
-          DCHECK(!check->GetTargetClassRTI().IsValid() || check->GetTargetClassRTI().IsExact());
         }
       }
     }
@@ -506,7 +499,8 @@
     return;
   }
 
-  ReferenceTypeInfo class_rti = instanceOf->GetTargetClassRTI();
+  HLoadClass* load_class = instanceOf->InputAt(1)->AsLoadClass();
+  ReferenceTypeInfo class_rti = load_class->GetLoadedClassRTI();
   if (!class_rti.IsValid()) {
     // We have loaded an unresolved class. Don't bother bounding the type.
     return;
@@ -650,20 +644,15 @@
 
 void ReferenceTypePropagation::RTPVisitor::VisitLoadClass(HLoadClass* instr) {
   ScopedObjectAccess soa(Thread::Current());
-  if (IsAdmissible(instr->GetClass().Get())) {
-    instr->SetValidLoadedClassRTI();
+  Handle<mirror::Class> resolved_class = instr->GetClass();
+  if (IsAdmissible(resolved_class.Get())) {
+    instr->SetLoadedClassRTI(ReferenceTypeInfo::Create(
+        resolved_class, /* is_exact */ true));
   }
   instr->SetReferenceTypeInfo(
       ReferenceTypeInfo::Create(handle_cache_->GetClassClassHandle(), /* is_exact */ true));
 }
 
-void ReferenceTypePropagation::RTPVisitor::VisitInstanceOf(HInstanceOf* instr) {
-  ScopedObjectAccess soa(Thread::Current());
-  if (IsAdmissible(instr->GetClass().Get())) {
-    instr->SetValidTargetClassRTI();
-  }
-}
-
 void ReferenceTypePropagation::RTPVisitor::VisitClinitCheck(HClinitCheck* instr) {
   instr->SetReferenceTypeInfo(instr->InputAt(0)->GetReferenceTypeInfo());
 }
@@ -731,6 +720,8 @@
 }
 
 void ReferenceTypePropagation::RTPVisitor::VisitCheckCast(HCheckCast* check_cast) {
+  HLoadClass* load_class = check_cast->InputAt(1)->AsLoadClass();
+  ReferenceTypeInfo class_rti = load_class->GetLoadedClassRTI();
   HBoundType* bound_type = check_cast->GetNext()->AsBoundType();
   if (bound_type == nullptr || bound_type->GetUpperBound().IsValid()) {
     // The next instruction is not an uninitialized BoundType. This must be
@@ -739,14 +730,12 @@
   }
   DCHECK_EQ(bound_type->InputAt(0), check_cast->InputAt(0));
 
-  ScopedObjectAccess soa(Thread::Current());
-  Handle<mirror::Class> klass = check_cast->GetClass();
-  if (IsAdmissible(klass.Get())) {
+  if (class_rti.IsValid()) {
     DCHECK(is_first_run_);
-    check_cast->SetValidTargetClassRTI();
+    ScopedObjectAccess soa(Thread::Current());
     // This is the first run of RTP and class is resolved.
-    bool is_exact = klass->CannotBeAssignedFromOtherTypes();
-    bound_type->SetUpperBound(ReferenceTypeInfo::Create(klass, is_exact),
+    bool is_exact = class_rti.GetTypeHandle()->CannotBeAssignedFromOtherTypes();
+    bound_type->SetUpperBound(ReferenceTypeInfo::Create(class_rti.GetTypeHandle(), is_exact),
                               /* CheckCast succeeds for nulls. */ true);
   } else {
     // This is the first run of RTP and class is unresolved. Remove the binding.
diff --git a/compiler/optimizing/sharpening.cc b/compiler/optimizing/sharpening.cc
index dffef17..1e49411 100644
--- a/compiler/optimizing/sharpening.cc
+++ b/compiler/optimizing/sharpening.cc
@@ -236,75 +236,6 @@
   return load_kind;
 }
 
-static inline bool CanUseTypeCheckBitstring(ObjPtr<mirror::Class> klass,
-                                            CodeGenerator* codegen,
-                                            CompilerDriver* compiler_driver)
-    REQUIRES_SHARED(Locks::mutator_lock_) {
-  DCHECK(!klass->IsProxyClass());
-  DCHECK(!klass->IsArrayClass());
-
-  if (Runtime::Current()->UseJitCompilation()) {
-    // If we're JITting, try to assign a type check bitstring (fall through).
-  } else if (codegen->GetCompilerOptions().IsBootImage()) {
-    const char* descriptor = klass->GetDexFile().StringByTypeIdx(klass->GetDexTypeIndex());
-    if (!compiler_driver->IsImageClass(descriptor)) {
-      return false;
-    }
-    // If the target is a boot image class, try to assign a type check bitstring (fall through).
-    // (If --force-determinism, this was already done; repeating is OK and yields the same result.)
-  } else {
-    // TODO: Use the bitstring also for AOT app compilation if the target class has a bitstring
-    // already assigned in the boot image.
-    return false;
-  }
-
-  // Try to assign a type check bitstring.
-  MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
-  if ((false) &&  // FIXME: Inliner does not respect compiler_driver->IsClassToCompile()
-                  // and we're hitting an unassigned bitstring in dex2oat_image_test. b/26687569
-      kIsDebugBuild &&
-      codegen->GetCompilerOptions().IsBootImage() &&
-      codegen->GetCompilerOptions().IsForceDeterminism()) {
-    SubtypeCheckInfo::State old_state = SubtypeCheck<ObjPtr<mirror::Class>>::GetState(klass);
-    CHECK(old_state == SubtypeCheckInfo::kAssigned || old_state == SubtypeCheckInfo::kOverflowed)
-        << klass->PrettyDescriptor() << "/" << old_state
-        << " in " << codegen->GetGraph()->PrettyMethod();
-  }
-  SubtypeCheckInfo::State state = SubtypeCheck<ObjPtr<mirror::Class>>::EnsureAssigned(klass);
-  return state == SubtypeCheckInfo::kAssigned;
-}
-
-TypeCheckKind HSharpening::ComputeTypeCheckKind(ObjPtr<mirror::Class> klass,
-                                                CodeGenerator* codegen,
-                                                CompilerDriver* compiler_driver,
-                                                bool needs_access_check) {
-  if (klass == nullptr) {
-    return TypeCheckKind::kUnresolvedCheck;
-  } else if (klass->IsInterface()) {
-    return TypeCheckKind::kInterfaceCheck;
-  } else if (klass->IsArrayClass()) {
-    if (klass->GetComponentType()->IsObjectClass()) {
-      return TypeCheckKind::kArrayObjectCheck;
-    } else if (klass->CannotBeAssignedFromOtherTypes()) {
-      return TypeCheckKind::kExactCheck;
-    } else {
-      return TypeCheckKind::kArrayCheck;
-    }
-  } else if (klass->IsFinal()) {  // TODO: Consider using bitstring for final classes.
-    return TypeCheckKind::kExactCheck;
-  } else if (kUseBitstringTypeCheck &&
-             !needs_access_check &&
-             CanUseTypeCheckBitstring(klass, codegen, compiler_driver)) {
-    // TODO: We should not need the `!needs_access_check` check but getting rid of that
-    // requires rewriting some optimizations in instruction simplifier.
-    return TypeCheckKind::kBitstringCheck;
-  } else if (klass->IsAbstract()) {
-    return TypeCheckKind::kAbstractClassCheck;
-  } else {
-    return TypeCheckKind::kClassHierarchyCheck;
-  }
-}
-
 void HSharpening::ProcessLoadString(
     HLoadString* load_string,
     CodeGenerator* codegen,
diff --git a/compiler/optimizing/sharpening.h b/compiler/optimizing/sharpening.h
index fa3e948..6df7d6d 100644
--- a/compiler/optimizing/sharpening.h
+++ b/compiler/optimizing/sharpening.h
@@ -44,10 +44,12 @@
 
   static constexpr const char* kSharpeningPassName = "sharpening";
 
-  // Used by Sharpening and InstructionSimplifier.
-  static void SharpenInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke,
-                                          CodeGenerator* codegen,
-                                          CompilerDriver* compiler_driver);
+  // Used by the builder.
+  static void ProcessLoadString(HLoadString* load_string,
+                                CodeGenerator* codegen,
+                                CompilerDriver* compiler_driver,
+                                const DexCompilationUnit& dex_compilation_unit,
+                                VariableSizedHandleScope* handles);
 
   // Used by the builder and the inliner.
   static HLoadClass::LoadKind ComputeLoadClassKind(HLoadClass* load_class,
@@ -56,19 +58,10 @@
                                                    const DexCompilationUnit& dex_compilation_unit)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  // Used by the builder.
-  static TypeCheckKind ComputeTypeCheckKind(ObjPtr<mirror::Class> klass,
-                                            CodeGenerator* codegen,
-                                            CompilerDriver* compiler_driver,
-                                            bool needs_access_check)
-      REQUIRES_SHARED(Locks::mutator_lock_);
-
-  // Used by the builder.
-  static void ProcessLoadString(HLoadString* load_string,
-                                CodeGenerator* codegen,
-                                CompilerDriver* compiler_driver,
-                                const DexCompilationUnit& dex_compilation_unit,
-                                VariableSizedHandleScope* handles);
+  // Used by Sharpening and InstructionSimplifier.
+  static void SharpenInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke,
+                                          CodeGenerator* codegen,
+                                          CompilerDriver* compiler_driver);
 
  private:
   CodeGenerator* codegen_;
diff --git a/dex2oat/linker/oat_writer.cc b/dex2oat/linker/oat_writer.cc
index b3a92e5..c7e9cda 100644
--- a/dex2oat/linker/oat_writer.cc
+++ b/dex2oat/linker/oat_writer.cc
@@ -372,6 +372,7 @@
     dex_files_(nullptr),
     vdex_size_(0u),
     vdex_dex_files_offset_(0u),
+    vdex_dex_shared_data_offset_(0u),
     vdex_verifier_deps_offset_(0u),
     vdex_quickening_info_offset_(0u),
     oat_size_(0u),
@@ -3363,6 +3364,54 @@
         return false;
       }
     }
+
+    // Write shared dex file data section and fix up the dex file headers.
+    vdex_dex_shared_data_offset_ = vdex_size_;
+    if (dex_container_ != nullptr) {
+      DexContainer::Section* const section = dex_container_->GetDataSection();
+      if (section->Size() > 0) {
+        const uint32_t shared_data_offset = vdex_size_;
+        const off_t existing_offset = out->Seek(0, kSeekCurrent);
+        if (static_cast<uint32_t>(existing_offset) != shared_data_offset) {
+          LOG(ERROR) << "Expected offset " << shared_data_offset << " but got " << existing_offset;
+          return false;
+        }
+        const uint32_t shared_data_size = section->Size();
+        if (!out->WriteFully(section->Begin(), shared_data_size)) {
+          LOG(ERROR) << "Failed to write shared data!";
+          return false;
+        }
+        // Fix up the dex headers to have correct offsets to the data section.
+        for (OatDexFile& oat_dex_file : oat_dex_files_) {
+          // Overwrite the header by reading it, updating the offset, and writing it back out.
+          DexFile::Header header;
+          if (!file->PreadFully(&header, sizeof(header), oat_dex_file.dex_file_offset_)) {
+            LOG(ERROR) << "Failed to read dex header for updating";
+            return false;
+          }
+          CHECK(CompactDexFile::IsMagicValid(header.magic_)) << "Must be compact dex";
+          CHECK_GT(shared_data_offset, oat_dex_file.dex_file_offset_);
+          // Offset is from the dex file base.
+          header.data_off_ = shared_data_offset - oat_dex_file.dex_file_offset_;
+          // The size should already reflect how much of the data buffer this dex may use.
+          CHECK_LE(header.data_size_, shared_data_size);
+          if (!file->PwriteFully(&header, sizeof(header), oat_dex_file.dex_file_offset_)) {
+            LOG(ERROR) << "Failed to write dex header for updating";
+            return false;
+          }
+        }
+        vdex_size_ += shared_data_size;
+        size_dex_file_ += shared_data_size;
+        section->Clear();
+        if (!out->Flush()) {
+          PLOG(ERROR) << "Failed to flush after writing shared dex section.";
+          return false;
+        }
+      }
+      dex_container_.reset();
+    }
+  } else {
+    vdex_dex_shared_data_offset_ = vdex_size_;
   }
 
   return true;
@@ -3521,20 +3570,23 @@
   options.compact_dex_level_ = compact_dex_level_;
   options.update_checksum_ = true;
   DexLayout dex_layout(options, profile_compilation_info_, /*file*/ nullptr, /*header*/ nullptr);
-  std::unique_ptr<DexContainer> out_data;
-  dex_layout.ProcessDexFile(location.c_str(), dex_file.get(), 0, &out_data);
+  dex_layout.ProcessDexFile(location.c_str(), dex_file.get(), 0, &dex_container_);
   oat_dex_file->dex_sections_layout_ = dex_layout.GetSections();
   // Dex layout can affect the size of the dex file, so we update here what we have set
   // when adding the dex file as a source.
   const UnalignedDexFileHeader* header =
-      AsUnalignedDexFileHeader(out_data->GetMainSection()->Begin());
+      AsUnalignedDexFileHeader(dex_container_->GetMainSection()->Begin());
   oat_dex_file->dex_file_size_ = header->file_size_;
   if (!WriteDexFile(out,
                     oat_dex_file,
-                    out_data->GetMainSection()->Begin(),
+                    dex_container_->GetMainSection()->Begin(),
                     /* update_input_vdex */ false)) {
     return false;
   }
+  if (dex_container_ != nullptr) {
+    // Clear the main section in case we write more data into the container.
+    dex_container_->GetMainSection()->Clear();
+  }
   CHECK_EQ(oat_dex_file->dex_file_location_checksum_, dex_file->GetLocationChecksum());
   return true;
 }
@@ -3996,12 +4048,14 @@
   DCHECK_NE(vdex_verifier_deps_offset_, 0u);
   DCHECK_NE(vdex_quickening_info_offset_, 0u);
 
-  size_t dex_section_size = vdex_verifier_deps_offset_ - vdex_dex_files_offset_;
+  size_t dex_section_size = vdex_dex_shared_data_offset_ - vdex_dex_files_offset_;
+  size_t dex_shared_data_size = vdex_verifier_deps_offset_ - vdex_dex_shared_data_offset_;
   size_t verifier_deps_section_size = vdex_quickening_info_offset_ - vdex_verifier_deps_offset_;
   size_t quickening_info_section_size = vdex_size_ - vdex_quickening_info_offset_;
 
   VdexFile::Header vdex_header(oat_dex_files_.size(),
                                dex_section_size,
+                               dex_shared_data_size,
                                verifier_deps_section_size,
                                quickening_info_section_size);
   if (!vdex_out->WriteFully(&vdex_header, sizeof(VdexFile::Header))) {
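
The vdex bookkeeping above derives every section size from the difference between two consecutive recorded offsets, so adding the shared dex data section only required one new offset field. A tiny sketch of that arithmetic with stand-in struct names (not the OatWriter API):

#include <cstddef>

struct VdexOffsets {        // offsets recorded while writing, in file order
  size_t dex_files;
  size_t dex_shared_data;
  size_t verifier_deps;
  size_t quickening_info;
  size_t total_size;        // final vdex_size_
};

struct VdexSectionSizes {
  size_t dex;
  size_t dex_shared_data;
  size_t verifier_deps;
  size_t quickening_info;
};

VdexSectionSizes ComputeSectionSizes(const VdexOffsets& o) {
  return {o.dex_shared_data - o.dex_files,
          o.verifier_deps - o.dex_shared_data,
          o.quickening_info - o.verifier_deps,
          o.total_size - o.quickening_info};
}
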
diff --git a/dex2oat/linker/oat_writer.h b/dex2oat/linker/oat_writer.h
index c08c05a..dfcaafc 100644
--- a/dex2oat/linker/oat_writer.h
+++ b/dex2oat/linker/oat_writer.h
@@ -41,6 +41,7 @@
 class BitVector;
 class CompiledMethod;
 class CompilerDriver;
+class DexContainer;
 class ProfileCompilationInfo;
 class TimingLogger;
 class TypeLookupTable;
@@ -382,6 +383,9 @@
   // Offset of section holding Dex files inside Vdex.
   size_t vdex_dex_files_offset_;
 
+  // Offset of section holding shared dex data section in the Vdex.
+  size_t vdex_dex_shared_data_offset_;
+
   // Offset of section holding VerifierDeps inside Vdex.
   size_t vdex_verifier_deps_offset_;
 
@@ -518,6 +522,9 @@
   // This pointer is only non-null after InitOatCodeDexFiles succeeds.
   std::unique_ptr<OrderedMethodList> ordered_methods_;
 
+  // Container of shared dex data.
+  std::unique_ptr<DexContainer> dex_container_;
+
   DISALLOW_COPY_AND_ASSIGN(OatWriter);
 };
 
diff --git a/dexlayout/compact_dex_writer.cc b/dexlayout/compact_dex_writer.cc
index 2f601b6..6149e75 100644
--- a/dexlayout/compact_dex_writer.cc
+++ b/dexlayout/compact_dex_writer.cc
@@ -34,7 +34,8 @@
 }
 
 CompactDexWriter::Container::Container(bool dedupe_code_items)
-    : code_item_dedupe_(dedupe_code_items, &data_section_) {}
+    : code_item_dedupe_(dedupe_code_items, &data_section_),
+      data_item_dedupe_(/*dedupe*/ true, &data_section_) {}
 
 uint32_t CompactDexWriter::WriteDebugInfoOffsetTable(Stream* stream) {
   const uint32_t start_offset = stream->Tell();
@@ -103,16 +104,45 @@
   return stream->Tell() - start_offset;
 }
 
-uint32_t CompactDexWriter::WriteCodeItem(Stream* stream,
-                                         dex_ir::CodeItem* code_item,
-                                         bool reserve_only) {
+CompactDexWriter::ScopedDataSectionItem::ScopedDataSectionItem(Stream* stream,
+                                                               dex_ir::Item* item,
+                                                               size_t alignment,
+                                                               Deduper* deduper)
+    : stream_(stream),
+      item_(item),
+      alignment_(alignment),
+      deduper_(deduper),
+      start_offset_(stream->Tell()) {
+  stream_->AlignTo(alignment_);
+}
+
+CompactDexWriter::ScopedDataSectionItem::~ScopedDataSectionItem() {
+  // After having written, maybe dedupe the whole item (excluding padding).
+  const uint32_t deduped_offset = deduper_->Dedupe(start_offset_,
+                                                   stream_->Tell(),
+                                                   item_->GetOffset());
+  // Only use the deduped offset if it also satisfies the required alignment.
+  if (deduped_offset != Deduper::kDidNotDedupe && IsAlignedParam(deduped_offset, alignment_)) {
+    item_->SetOffset(deduped_offset);
+    stream_->Clear(start_offset_, stream_->Tell() - start_offset_);
+    // Undo the offset for all that we wrote since we deduped.
+    stream_->Seek(start_offset_);
+  }
+}
+
+size_t CompactDexWriter::ScopedDataSectionItem::Written() const {
+  return stream_->Tell() - start_offset_;
+}
+
+void CompactDexWriter::WriteCodeItem(Stream* stream,
+                                     dex_ir::CodeItem* code_item,
+                                     bool reserve_only) {
   DCHECK(code_item != nullptr);
   DCHECK(!reserve_only) << "Not supported because of deduping.";
-  const uint32_t start_offset = stream->Tell();
-
-  // Align to minimum requirements, additional alignment requirements are handled below after we
-  // know the preheader size.
-  stream->AlignTo(CompactDexFile::CodeItem::kAlignment);
+  ScopedDataSectionItem data_item(stream,
+                                  code_item,
+                                  CompactDexFile::CodeItem::kAlignment,
+                                  code_item_dedupe_);
 
   CompactDexFile::CodeItem disk_code_item;
 
@@ -146,8 +176,6 @@
     }
   }
 
-  const uint32_t data_start = stream->Tell();
-
   // Write preheader first.
   stream->Write(reinterpret_cast<const uint8_t*>(preheader), preheader_bytes);
   // Registered offset is after the preheader.
@@ -159,20 +187,15 @@
   stream->Write(code_item->Insns(), code_item->InsnsSize() * sizeof(uint16_t));
   // Write the post instruction data.
   WriteCodeItemPostInstructionData(stream, code_item, reserve_only);
+}
 
-  if (compute_offsets_) {
-    // After having written, maybe dedupe the whole code item (excluding padding).
-    const uint32_t deduped_offset = code_item_dedupe_->Dedupe(data_start,
-                                                              stream->Tell(),
-                                                              code_item->GetOffset());
-    if (deduped_offset != Deduper::kDidNotDedupe) {
-      code_item->SetOffset(deduped_offset);
-      // Undo the offset for all that we wrote since we deduped.
-      stream->Seek(start_offset);
-    }
-  }
-
-  return stream->Tell() - start_offset;
+void CompactDexWriter::WriteDebugInfoItem(Stream* stream, dex_ir::DebugInfoItem* debug_info) {
+  ScopedDataSectionItem data_item(stream,
+                                  debug_info,
+                                  SectionAlignment(DexFile::kDexTypeDebugInfoItem),
+                                  data_item_dedupe_);
+  ProcessOffset(stream, debug_info);
+  stream->Write(debug_info->GetDebugInfo(), debug_info->GetDebugInfoSize());
 }
 
 
@@ -283,14 +306,35 @@
   return sizeof(CompactDexFile::Header);
 }
 
+void CompactDexWriter::WriteStringData(Stream* stream, dex_ir::StringData* string_data) {
+  ScopedDataSectionItem data_item(stream,
+                                  string_data,
+                                  SectionAlignment(DexFile::kDexTypeStringDataItem),
+                                  data_item_dedupe_);
+  ProcessOffset(stream, string_data);
+  stream->WriteUleb128(CountModifiedUtf8Chars(string_data->Data()));
+  stream->Write(string_data->Data(), strlen(string_data->Data()));
+  // Skip null terminator (already zeroed out, no need to write).
+  stream->Skip(1);
+}
+
 void CompactDexWriter::Write(DexContainer* output)  {
   CHECK(output->IsCompactDexContainer());
   Container* const container = down_cast<Container*>(output);
   // For now, use the same stream for both data and metadata.
-  Stream stream(output->GetMainSection());
-  Stream* main_stream = &stream;
-  Stream* data_stream = &stream;
+  Stream temp_main_stream(output->GetMainSection());
+  CHECK_EQ(output->GetMainSection()->Size(), 0u);
+  Stream temp_data_stream(output->GetDataSection());
+  Stream* main_stream = &temp_main_stream;
+  Stream* data_stream = &temp_data_stream;
+
+  // Reserve offset 0 for null: seek to the data section alignment, or to the end of the
+  // section if it is already larger.
+  data_stream->Seek(std::max(
+      static_cast<uint32_t>(output->GetDataSection()->Size()),
+      kDataSectionAlignment));
   code_item_dedupe_ = &container->code_item_dedupe_;
+  data_item_dedupe_ = &container->data_item_dedupe_;
 
   // Starting offset is right after the header.
   main_stream->Seek(GetHeaderSize());
@@ -312,11 +356,9 @@
   WriteCallSiteIds(main_stream, /*reserve_only*/ true);
   WriteMethodHandles(main_stream);
 
-  uint32_t data_offset_ = 0u;
   if (compute_offsets_) {
     // Data section.
     data_stream->AlignTo(kDataSectionAlignment);
-    data_offset_ = data_stream->Tell();
   }
 
   // Write code item first to minimize the space required for encoded methods.
@@ -362,19 +404,9 @@
   } else {
     data_stream->Seek(collection.MapListOffset());
   }
-  GenerateAndWriteMapItems(data_stream);
-  data_stream->AlignTo(kDataSectionAlignment);
 
   // Map items are included in the data section.
-  if (compute_offsets_) {
-    header_->SetDataSize(data_stream->Tell() - data_offset_);
-    if (header_->DataSize() != 0) {
-      // Offset must be zero when the size is zero.
-      header_->SetDataOffset(data_offset_);
-    } else {
-      header_->SetDataOffset(0u);
-    }
-  }
+  GenerateAndWriteMapItems(data_stream);
 
   // Write link data if it exists.
   const std::vector<uint8_t>& link_data = collection.LinkData();
@@ -391,19 +423,39 @@
   // Write debug info offset table last to make dex file verifier happy.
   WriteDebugInfoOffsetTable(data_stream);
 
+  data_stream->AlignTo(kDataSectionAlignment);
+  if (compute_offsets_) {
+    header_->SetDataSize(data_stream->Tell());
+    if (header_->DataSize() != 0) {
+      main_stream->AlignTo(kDataSectionAlignment);
+      // For now, default to saying the data is right after the main section.
+      header_->SetDataOffset(main_stream->Tell());
+    } else {
+      // The offset must be zero when the size is zero.
+      header_->SetDataOffset(0u);
+    }
+  }
+
   // Write header last.
   if (compute_offsets_) {
     header_->SetFileSize(main_stream->Tell());
   }
   WriteHeader(main_stream);
 
+  // Trim sections to make sure they are sized properly.
+  output->GetMainSection()->Resize(header_->FileSize());
+  output->GetDataSection()->Resize(data_stream->Tell());
+
   if (dex_layout_->GetOptions().update_checksum_) {
-    header_->SetChecksum(DexFile::CalculateChecksum(main_stream->Begin(), header_->FileSize()));
+    // Compute the cdex checksum, which also covers the used part of the data section.
+    header_->SetChecksum(CompactDexFile::CalculateChecksum(output->GetMainSection()->Begin(),
+                                                           output->GetMainSection()->Size(),
+                                                           output->GetDataSection()->Begin(),
+                                                           output->GetDataSection()->Size()));
     // Rewrite the header with the calculated checksum.
     WriteHeader(main_stream);
   }
-  // Trim the map to make it sized as large as the dex file.
-  output->GetMainSection()->Resize(header_->FileSize());
 }
 
 std::unique_ptr<DexContainer> CompactDexWriter::CreateDexContainer() const {
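ScopedDataSectionItem above is an RAII wrapper: construction aligns the stream and remembers the start offset, and destruction tries to dedupe the bytes written in between, rewinding the stream if an identical item already exists. A self-contained sketch of the same pattern over a plain byte buffer, using a content-keyed map instead of ART's hashed-range Deduper; all names below are illustrative:

#include <cstdint>
#include <cstring>
#include <string>
#include <unordered_map>
#include <vector>

// Toy output buffer with Tell/Seek-like behavior.
struct Buffer {
  std::vector<uint8_t> bytes;
  size_t pos = 0;
  void Write(const void* data, size_t size) {
    if (pos + size > bytes.size()) {
      bytes.resize(pos + size);
    }
    std::memcpy(bytes.data() + pos, data, size);
    pos += size;
  }
  void AlignTo(size_t alignment) {
    static const uint8_t kZero = 0;
    while (pos % alignment != 0) {
      Write(&kZero, 1);
    }
  }
};

// RAII helper: on destruction, reuse a previously written identical item (if any)
// and drop the bytes that were just written; otherwise record the new offset.
class ScopedDedupedWrite {
 public:
  ScopedDedupedWrite(Buffer* out,
                     std::unordered_map<std::string, size_t>* dedupe,
                     size_t alignment,
                     size_t* item_offset)
      : out_(out), dedupe_(dedupe), item_offset_(item_offset) {
    out_->AlignTo(alignment);
    start_ = out_->pos;
  }
  ~ScopedDedupedWrite() {
    std::string key(out_->bytes.begin() + start_, out_->bytes.begin() + out_->pos);
    auto it = dedupe_->find(key);
    if (it != dedupe_->end()) {
      *item_offset_ = it->second;  // Point the item at the existing copy.
      out_->bytes.resize(start_);  // Undo what we just wrote.
      out_->pos = start_;
    } else {
      dedupe_->emplace(std::move(key), start_);
      *item_offset_ = start_;
    }
  }

 private:
  Buffer* const out_;
  std::unordered_map<std::string, size_t>* const dedupe_;
  size_t* const item_offset_;
  size_t start_;
};

With a helper like this, each of the WriteCodeItem / WriteStringData / WriteDebugInfoItem overrides above reduces to "open the scope, write the payload".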
diff --git a/dexlayout/compact_dex_writer.h b/dexlayout/compact_dex_writer.h
index 626b85a..4834bfc 100644
--- a/dexlayout/compact_dex_writer.h
+++ b/dexlayout/compact_dex_writer.h
@@ -86,6 +86,21 @@
                        HashedMemoryRange::HashEqual> dedupe_map_;
   };
 
+  // Handles alignment and deduping of a data section item.
+  class ScopedDataSectionItem {
+   public:
+    ScopedDataSectionItem(Stream* stream, dex_ir::Item* item, size_t alignment, Deduper* deduper);
+    ~ScopedDataSectionItem();
+    size_t Written() const;
+
+   private:
+    Stream* const stream_;
+    dex_ir::Item* const item_;
+    const size_t alignment_;
+    Deduper* deduper_;
+    const uint32_t start_offset_;
+  };
+
  public:
   class Container : public DexContainer {
    public:
@@ -107,6 +122,7 @@
     VectorSection main_section_;
     VectorSection data_section_;
     Deduper code_item_dedupe_;
+    Deduper data_item_dedupe_;
 
     friend class CompactDexWriter;
   };
@@ -122,7 +138,11 @@
 
   uint32_t WriteDebugInfoOffsetTable(Stream* stream);
 
-  uint32_t WriteCodeItem(Stream* stream, dex_ir::CodeItem* code_item, bool reserve_only) OVERRIDE;
+  void WriteCodeItem(Stream* stream, dex_ir::CodeItem* code_item, bool reserve_only) OVERRIDE;
+
+  void WriteStringData(Stream* stream, dex_ir::StringData* string_data) OVERRIDE;
+
+  void WriteDebugInfoItem(Stream* stream, dex_ir::DebugInfoItem* debug_info) OVERRIDE;
 
   void SortDebugInfosByMethodIndex();
 
@@ -140,6 +160,7 @@
 
   // State for where we are deduping.
   Deduper* code_item_dedupe_ = nullptr;
+  Deduper* data_item_dedupe_ = nullptr;
 
   DISALLOW_COPY_AND_ASSIGN(CompactDexWriter);
 };
diff --git a/dexlayout/dex_container.h b/dexlayout/dex_container.h
index 7c426cb..2b9a5f9 100644
--- a/dexlayout/dex_container.h
+++ b/dexlayout/dex_container.h
@@ -43,6 +43,9 @@
     // Resize the backing storage.
     virtual void Resize(size_t size) = 0;
 
+    // Clear the container.
+    virtual void Clear() = 0;
+
     // Returns the end of the memory region.
     uint8_t* End() {
       return Begin() + Size();
@@ -66,6 +69,10 @@
       data_.resize(size, 0u);
     }
 
+    void Clear() OVERRIDE {
+      data_.clear();
+    }
+
    private:
     std::vector<uint8_t> data_;
   };
diff --git a/dexlayout/dex_ir.cc b/dexlayout/dex_ir.cc
index fb7dff6..1525d53 100644
--- a/dexlayout/dex_ir.cc
+++ b/dexlayout/dex_ir.cc
@@ -280,7 +280,7 @@
     }
     case DexFile::kDexAnnotationArray: {
       EncodedValueVector* values = new EncodedValueVector();
-      const uint32_t offset = *data - dex_file.Begin();
+      const uint32_t offset = *data - dex_file.DataBegin();
       const uint32_t size = DecodeUnsignedLeb128(data);
       // Decode all elements.
       for (uint32_t i = 0; i < size; i++) {
@@ -440,7 +440,7 @@
 AnnotationItem* Collections::CreateAnnotationItem(const DexFile& dex_file,
                                                   const DexFile::AnnotationItem* annotation) {
   const uint8_t* const start_data = reinterpret_cast<const uint8_t*>(annotation);
-  const uint32_t offset = start_data - dex_file.Begin();
+  const uint32_t offset = start_data - dex_file.DataBegin();
   AnnotationItem* annotation_item = annotation_items_map_.GetExistingObject(offset);
   if (annotation_item == nullptr) {
     uint8_t visibility = annotation->visibility_;
@@ -772,8 +772,7 @@
 
 void Collections::CreateCallSitesAndMethodHandles(const DexFile& dex_file) {
   // Iterate through the map list and set the offset of the CallSiteIds and MethodHandleItems.
-  const DexFile::MapList* map =
-      reinterpret_cast<const DexFile::MapList*>(dex_file.Begin() + MapListOffset());
+  const DexFile::MapList* map = dex_file.GetMapList();
   for (uint32_t i = 0; i < map->size_; ++i) {
     const DexFile::MapItem* item = map->list_ + i;
     switch (item->type_) {
@@ -799,7 +798,7 @@
 
 void Collections::CreateCallSiteId(const DexFile& dex_file, uint32_t i) {
   const DexFile::CallSiteIdItem& disk_call_site_id = dex_file.GetCallSiteId(i);
-  const uint8_t* disk_call_item_ptr = dex_file.Begin() + disk_call_site_id.data_off_;
+  const uint8_t* disk_call_item_ptr = dex_file.DataBegin() + disk_call_site_id.data_off_;
   EncodedArrayItem* call_site_item =
       CreateEncodedArrayItem(dex_file, disk_call_item_ptr, disk_call_site_id.data_off_);
 
diff --git a/dexlayout/dex_ir_builder.cc b/dexlayout/dex_ir_builder.cc
index 231826b..3ec163ce 100644
--- a/dexlayout/dex_ir_builder.cc
+++ b/dexlayout/dex_ir_builder.cc
@@ -83,8 +83,8 @@
 
   // Load the link data if it exists.
   collections.SetLinkData(std::vector<uint8_t>(
-      dex_file.Begin() + dex_file.GetHeader().link_off_,
-      dex_file.Begin() + dex_file.GetHeader().link_off_ + dex_file.GetHeader().link_size_));
+      dex_file.DataBegin() + dex_file.GetHeader().link_off_,
+      dex_file.DataBegin() + dex_file.GetHeader().link_off_ + dex_file.GetHeader().link_size_));
 
   return header;
 }
@@ -92,8 +92,7 @@
 static void CheckAndSetRemainingOffsets(const DexFile& dex_file, Collections* collections) {
   const DexFile::Header& disk_header = dex_file.GetHeader();
   // Read MapItems and validate/set remaining offsets.
-  const DexFile::MapList* map =
-      reinterpret_cast<const DexFile::MapList*>(dex_file.Begin() + disk_header.map_off_);
+  const DexFile::MapList* map = dex_file.GetMapList();
   const uint32_t count = map->size_;
   for (uint32_t i = 0; i < count; ++i) {
     const DexFile::MapItem* item = map->list_ + i;
diff --git a/dexlayout/dex_writer.cc b/dexlayout/dex_writer.cc
index eb038a0..a9aa97d 100644
--- a/dexlayout/dex_writer.cc
+++ b/dexlayout/dex_writer.cc
@@ -30,6 +30,8 @@
 
 namespace art {
 
+constexpr uint32_t DexWriter::kDataSectionAlignment;
+
 static size_t EncodeIntValue(int32_t value, uint8_t* buffer) {
   size_t length = 0;
   if (value >= 0) {
@@ -252,15 +254,19 @@
   return stream->Tell() - start;
 }
 
+void DexWriter::WriteStringData(Stream* stream, dex_ir::StringData* string_data) {
+  ProcessOffset(stream, string_data);
+  stream->AlignTo(SectionAlignment(DexFile::kDexTypeStringDataItem));
+  stream->WriteUleb128(CountModifiedUtf8Chars(string_data->Data()));
+  stream->Write(string_data->Data(), strlen(string_data->Data()));
+  // Skip null terminator (already zeroed out, no need to write).
+  stream->Skip(1);
+}
+
 uint32_t DexWriter::WriteStringDatas(Stream* stream) {
   const uint32_t start = stream->Tell();
   for (std::unique_ptr<dex_ir::StringData>& string_data : header_->GetCollections().StringDatas()) {
-    ProcessOffset(stream, string_data.get());
-    stream->AlignTo(SectionAlignment(DexFile::kDexTypeStringDataItem));
-    stream->WriteUleb128(CountModifiedUtf8Chars(string_data->Data()));
-    stream->Write(string_data->Data(), strlen(string_data->Data()));
-    // Skip null terminator (already zeroed out, no need to write).
-    stream->Skip(1);
+    WriteStringData(stream, string_data.get());
   }
   if (compute_offsets_ && start != stream->Tell()) {
     header_->GetCollections().SetStringDatasOffset(start);
@@ -481,13 +487,17 @@
   return stream->Tell() - start;
 }
 
+void DexWriter::WriteDebugInfoItem(Stream* stream, dex_ir::DebugInfoItem* debug_info) {
+  stream->AlignTo(SectionAlignment(DexFile::kDexTypeDebugInfoItem));
+  ProcessOffset(stream, debug_info);
+  stream->Write(debug_info->GetDebugInfo(), debug_info->GetDebugInfoSize());
+}
+
 uint32_t DexWriter::WriteDebugInfoItems(Stream* stream) {
   const uint32_t start = stream->Tell();
   for (std::unique_ptr<dex_ir::DebugInfoItem>& debug_info :
       header_->GetCollections().DebugInfoItems()) {
-    stream->AlignTo(SectionAlignment(DexFile::kDexTypeDebugInfoItem));
-    ProcessOffset(stream, debug_info.get());
-    stream->Write(debug_info->GetDebugInfo(), debug_info->GetDebugInfoSize());
+    WriteDebugInfoItem(stream, debug_info.get());
   }
   if (compute_offsets_ && start != stream->Tell()) {
     header_->GetCollections().SetDebugInfoItemsOffset(start);
@@ -533,9 +543,9 @@
   return stream->Tell() - start_offset;
 }
 
-uint32_t DexWriter::WriteCodeItem(Stream* stream,
-                                  dex_ir::CodeItem* code_item,
-                                  bool reserve_only) {
+void DexWriter::WriteCodeItem(Stream* stream,
+                              dex_ir::CodeItem* code_item,
+                              bool reserve_only) {
   DCHECK(code_item != nullptr);
   const uint32_t start_offset = stream->Tell();
   stream->AlignTo(SectionAlignment(DexFile::kDexTypeCodeItem));
@@ -562,7 +572,6 @@
   if (reserve_only) {
     stream->Clear(start_offset, stream->Tell() - start_offset);
   }
-  return stream->Tell() - start_offset;
 }
 
 uint32_t DexWriter::WriteCodeItems(Stream* stream, bool reserve_only) {
@@ -573,13 +582,14 @@
   }
   const uint32_t start = stream->Tell();
   for (auto& code_item : header_->GetCollections().CodeItems()) {
-    const size_t code_item_size = WriteCodeItem(stream, code_item.get(), reserve_only);
+    uint32_t start_offset = stream->Tell();
+    WriteCodeItem(stream, code_item.get(), reserve_only);
     // Only add the section hotness info once.
     if (!reserve_only && code_section != nullptr) {
       auto it = dex_layout_->LayoutHotnessInfo().code_item_layout_.find(code_item.get());
       if (it != dex_layout_->LayoutHotnessInfo().code_item_layout_.end()) {
         code_section->parts_[static_cast<size_t>(it->second)].CombineSection(
-            stream->Tell() - code_item_size,
+            start_offset,
             stream->Tell());
       }
     }
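For reference, WriteStringData above emits the standard string_data_item layout: a ULEB128-encoded UTF-16 code-unit count, the MUTF-8 bytes, then a single NUL terminator. A small standalone sketch of that encoding, restricted to plain ASCII so strlen doubles as the code-unit count; helper names are illustrative:

#include <cstdint>
#include <cstring>
#include <vector>

static void WriteUleb128(std::vector<uint8_t>* out, uint32_t value) {
  do {
    uint8_t byte = value & 0x7f;
    value >>= 7;
    if (value != 0) {
      byte |= 0x80;  // More bytes follow.
    }
    out->push_back(byte);
  } while (value != 0);
}

static void WriteAsciiStringData(std::vector<uint8_t>* out, const char* s) {
  const uint32_t length = static_cast<uint32_t>(std::strlen(s));
  WriteUleb128(out, length);                // UTF-16 length (equals byte length for ASCII).
  out->insert(out->end(), s, s + length);   // MUTF-8 payload.
  out->push_back(0);                        // NUL terminator.
}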
diff --git a/dexlayout/dex_writer.h b/dexlayout/dex_writer.h
index e581a8b..e6e0533 100644
--- a/dexlayout/dex_writer.h
+++ b/dexlayout/dex_writer.h
@@ -260,7 +260,9 @@
   virtual uint32_t WriteCodeItemPostInstructionData(Stream* stream,
                                                     dex_ir::CodeItem* item,
                                                     bool reserve_only);
-  virtual uint32_t WriteCodeItem(Stream* stream, dex_ir::CodeItem* item, bool reserve_only);
+  virtual void WriteCodeItem(Stream* stream, dex_ir::CodeItem* item, bool reserve_only);
+  virtual void WriteDebugInfoItem(Stream* stream, dex_ir::DebugInfoItem* debug_info);
+  virtual void WriteStringData(Stream* stream, dex_ir::StringData* string_data);
 
   // Process an offset, if compute_offset is set, write into the dex ir item, otherwise read the
   // existing offset and use that for writing.
diff --git a/dexlayout/dexlayout.cc b/dexlayout/dexlayout.cc
index d33a0bd..1b32f7b 100644
--- a/dexlayout/dexlayout.cc
+++ b/dexlayout/dexlayout.cc
@@ -1838,13 +1838,17 @@
     }
   }
   DexWriter::Output(this, dex_container, compute_offsets);
-  DexContainer* const container = dex_container->get();
-  DexContainer::Section* const main_section = container->GetMainSection();
-  DexContainer::Section* const data_section = container->GetDataSection();
-  CHECK_EQ(data_section->Size(), 0u) << "Unsupported";
   if (new_file != nullptr) {
+    DexContainer* const container = dex_container->get();
+    DexContainer::Section* const main_section = container->GetMainSection();
     if (!new_file->WriteFully(main_section->Begin(), main_section->Size())) {
-      LOG(ERROR) << "Failed tow write dex file to " << dex_file_location;
+      LOG(ERROR) << "Failed to write main section for dex file " << dex_file_location;
+      new_file->Erase();
+      return;
+    }
+    DexContainer::Section* const data_section = container->GetDataSection();
+    if (!new_file->WriteFully(data_section->Begin(), data_section->Size())) {
+      LOG(ERROR) << "Failed to write data section for dex file " << dex_file_location;
       new_file->Erase();
       return;
     }
@@ -1919,17 +1923,22 @@
       // Dex file verifier cannot handle compact dex.
       bool verify = options_.compact_dex_level_ == CompactDexLevel::kCompactDexLevelNone;
       const ArtDexFileLoader dex_file_loader;
-      DexContainer::Section* section = (*dex_container)->GetMainSection();
-      DCHECK_EQ(file_size, section->Size());
+      DexContainer::Section* const main_section = (*dex_container)->GetMainSection();
+      DexContainer::Section* const data_section = (*dex_container)->GetDataSection();
+      DCHECK_EQ(file_size, main_section->Size())
+          << main_section->Size() << " " << data_section->Size();
       std::unique_ptr<const DexFile> output_dex_file(
-          dex_file_loader.Open(section->Begin(),
-                               file_size,
-                               location,
-                               /* checksum */ 0,
-                               /*oat_dex_file*/ nullptr,
-                               verify,
-                               /*verify_checksum*/ false,
-                               &error_msg));
+          dex_file_loader.OpenWithDataSection(
+              main_section->Begin(),
+              main_section->Size(),
+              data_section->Begin(),
+              data_section->Size(),
+              location,
+              /* checksum */ 0,
+              /*oat_dex_file*/ nullptr,
+              verify,
+              /*verify_checksum*/ false,
+              &error_msg));
       CHECK(output_dex_file != nullptr) << "Failed to re-open output file:" << error_msg;
 
       // Do IR-level comparison between input and output. This check ignores potential differences
diff --git a/oatdump/oatdump.cc b/oatdump/oatdump.cc
index f2a69f3..f7151b3 100644
--- a/oatdump/oatdump.cc
+++ b/oatdump/oatdump.cc
@@ -1156,6 +1156,7 @@
       // Vdex unquicken output should match original input bytecode
       uint32_t orig_checksum =
           reinterpret_cast<DexFile::Header*>(const_cast<uint8_t*>(dex_file->Begin()))->checksum_;
       if (orig_checksum != dex_file->CalculateChecksum()) {
         os << "Unexpected checksum from unquicken dex file '" << dex_file_location << "'\n";
         return false;
@@ -1208,7 +1209,11 @@
       return false;
     }
 
-    if (!file->WriteFully(dex_file->Begin(), fsize)) {
+    bool success = file->WriteFully(dex_file->Begin(), fsize);
+
+    if (!success) {
       os << "Failed to write dex file";
       file->Erase();
       return false;
diff --git a/openjdkjvmti/fixed_up_dex_file.cc b/openjdkjvmti/fixed_up_dex_file.cc
index 323137a..cc56a7b 100644
--- a/openjdkjvmti/fixed_up_dex_file.cc
+++ b/openjdkjvmti/fixed_up_dex_file.cc
@@ -70,8 +70,21 @@
 std::unique_ptr<FixedUpDexFile> FixedUpDexFile::Create(const art::DexFile& original) {
   // Copy the data into mutable memory.
   std::vector<unsigned char> data;
-  data.resize(original.Size());
-  memcpy(data.data(), original.Begin(), original.Size());
+  if (original.IsCompactDexFile()) {
+    // Compact dex has a separate data section that is relative from the original dex.
+    // We need to copy the shared data section so that dequickening doesn't change anything.
+    data.resize(original.Size() + original.DataSize());
+    memcpy(data.data(), original.Begin(), original.Size());
+    memcpy(data.data() + original.Size(), original.DataBegin(), original.DataSize());
+    // Go patch up the header to point to the copied data section.
+    art::CompactDexFile::Header* const header =
+        const_cast<art::CompactDexFile::Header*>(art::CompactDexFile::Header::At(data.data()));
+    header->data_off_ = original.Size();
+    header->data_size_ = original.DataSize();
+  } else {
+    data.resize(original.Size());
+    memcpy(data.data(), original.Begin(), original.Size());
+  }
   std::string error;
   const art::ArtDexFileLoader dex_file_loader;
   std::unique_ptr<const art::DexFile> new_dex_file(dex_file_loader.Open(
@@ -105,6 +118,7 @@
                               0,
                               &dex_container);
     art::DexContainer::Section* main_section = dex_container->GetMainSection();
+    CHECK_EQ(dex_container->GetDataSection()->Size(), 0u);
     // Overwrite the dex file stored in data with the new result.
     data.clear();
     data.insert(data.end(), main_section->Begin(), main_section->End());
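FixedUpDexFile::Create above flattens a compact dex file whose data lives elsewhere: it appends a copy of the data section after the main section and patches the header so data_off_/data_size_ point at the copy. A standalone sketch of that step, with a stand-in header struct (the real CompactDexFile::Header has more fields and a different layout):

#include <cassert>
#include <cstdint>
#include <cstring>
#include <vector>

// Stand-in for the two header fields the fix-up touches; purely illustrative.
struct SketchHeader {
  uint32_t data_size_;
  uint32_t data_off_;
};

static std::vector<uint8_t> FlattenSections(const uint8_t* main_begin, size_t main_size,
                                            const uint8_t* data_begin, size_t data_size) {
  assert(main_size >= sizeof(SketchHeader));
  std::vector<uint8_t> out(main_size + data_size);
  std::memcpy(out.data(), main_begin, main_size);               // Main section first.
  std::memcpy(out.data() + main_size, data_begin, data_size);   // Then the data section.
  // Point the (stand-in) header at the appended copy of the data section. In the real
  // code the header sits at the start of the main section with its own field layout.
  SketchHeader* header = reinterpret_cast<SketchHeader*>(out.data());
  header->data_off_ = static_cast<uint32_t>(main_size);
  header->data_size_ = static_cast<uint32_t>(data_size);
  return out;
}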
diff --git a/runtime/arch/arm/quick_entrypoints_arm.S b/runtime/arch/arm/quick_entrypoints_arm.S
index 1671a24..737d2a8 100644
--- a/runtime/arch/arm/quick_entrypoints_arm.S
+++ b/runtime/arch/arm/quick_entrypoints_arm.S
@@ -794,9 +794,6 @@
     .extern artInstanceOfFromCode
     .extern artThrowClassCastExceptionForObject
 ENTRY art_quick_check_instance_of
-    // Type check using the bit string passes null as the target class. In that case just throw.
-    cbz r1, .Lthrow_class_cast_exception_for_bitstring_check
-
     push {r0-r2, lr}                    @ save arguments, padding (r2) and link register
     .cfi_adjust_cfa_offset 16
     .cfi_rel_offset r0, 0
@@ -815,7 +812,6 @@
     .cfi_restore r2
     .cfi_restore lr
 
-.Lthrow_class_cast_exception_for_bitstring_check:
     SETUP_SAVE_ALL_CALLEE_SAVES_FRAME r2       @ save all registers as basis for long jump context
     mov r2, r9                      @ pass Thread::Current
     bl  artThrowClassCastExceptionForObject  @ (Object*, Class*, Thread*)
diff --git a/runtime/arch/arm64/quick_entrypoints_arm64.S b/runtime/arch/arm64/quick_entrypoints_arm64.S
index 0614118..b0e7b0a 100644
--- a/runtime/arch/arm64/quick_entrypoints_arm64.S
+++ b/runtime/arch/arm64/quick_entrypoints_arm64.S
@@ -1333,9 +1333,6 @@
     .extern artInstanceOfFromCode
     .extern artThrowClassCastExceptionForObject
 ENTRY art_quick_check_instance_of
-    // Type check using the bit string passes null as the target class. In that case just throw.
-    cbz x1, .Lthrow_class_cast_exception_for_bitstring_check
-
     // Store arguments and link register
     // Stack needs to be 16B aligned on calls.
     SAVE_TWO_REGS_INCREASE_FRAME x0, x1, 32
@@ -1361,7 +1358,6 @@
     // Restore
     RESTORE_TWO_REGS_DECREASE_FRAME x0, x1, 32
 
-.Lthrow_class_cast_exception_for_bitstring_check:
     SETUP_SAVE_ALL_CALLEE_SAVES_FRAME // save all registers as basis for long jump context
     mov x2, xSELF                     // pass Thread::Current
     bl artThrowClassCastExceptionForObject     // (Object*, Class*, Thread*)
diff --git a/runtime/arch/mips/quick_entrypoints_mips.S b/runtime/arch/mips/quick_entrypoints_mips.S
index d8fe480..b2f7e10 100644
--- a/runtime/arch/mips/quick_entrypoints_mips.S
+++ b/runtime/arch/mips/quick_entrypoints_mips.S
@@ -1423,10 +1423,6 @@
     .extern artInstanceOfFromCode
     .extern artThrowClassCastExceptionForObject
 ENTRY art_quick_check_instance_of
-    // Type check using the bit string passes null as the target class. In that case just throw.
-    beqz   $a1, .Lthrow_class_cast_exception_for_bitstring_check
-    nop
-
     addiu  $sp, $sp, -32
     .cfi_adjust_cfa_offset 32
     sw     $gp, 16($sp)
@@ -1445,15 +1441,12 @@
     jalr   $zero, $ra
     addiu  $sp, $sp, 32
     .cfi_adjust_cfa_offset -32
-
 .Lthrow_class_cast_exception:
     lw     $t9, 8($sp)
     lw     $a1, 4($sp)
     lw     $a0, 0($sp)
     addiu  $sp, $sp, 32
     .cfi_adjust_cfa_offset -32
-
-.Lthrow_class_cast_exception_for_bitstring_check:
     SETUP_SAVE_ALL_CALLEE_SAVES_FRAME
     la   $t9, artThrowClassCastExceptionForObject
     jalr $zero, $t9                 # artThrowClassCastException (Object*, Class*, Thread*)
diff --git a/runtime/arch/mips64/quick_entrypoints_mips64.S b/runtime/arch/mips64/quick_entrypoints_mips64.S
index a5edc1f..63f4f6c 100644
--- a/runtime/arch/mips64/quick_entrypoints_mips64.S
+++ b/runtime/arch/mips64/quick_entrypoints_mips64.S
@@ -1364,9 +1364,6 @@
     .extern artInstanceOfFromCode
     .extern artThrowClassCastExceptionForObject
 ENTRY art_quick_check_instance_of
-    // Type check using the bit string passes null as the target class. In that case just throw.
-    beqzc  $a1, .Lthrow_class_cast_exception_for_bitstring_check
-
     daddiu $sp, $sp, -32
     .cfi_adjust_cfa_offset 32
     sd     $ra, 24($sp)
@@ -1382,15 +1379,12 @@
     jalr   $zero, $ra
     daddiu $sp, $sp, 32
     .cfi_adjust_cfa_offset -32
-
 .Lthrow_class_cast_exception:
     ld     $t9, 16($sp)
     ld     $a1, 8($sp)
     ld     $a0, 0($sp)
     daddiu $sp, $sp, 32
     .cfi_adjust_cfa_offset -32
-
-.Lthrow_class_cast_exception_for_bitstring_check:
     SETUP_GP
     SETUP_SAVE_ALL_CALLEE_SAVES_FRAME
     dla  $t9, artThrowClassCastExceptionForObject
diff --git a/runtime/arch/x86/quick_entrypoints_x86.S b/runtime/arch/x86/quick_entrypoints_x86.S
index d64e2fd..5a28120 100644
--- a/runtime/arch/x86/quick_entrypoints_x86.S
+++ b/runtime/arch/x86/quick_entrypoints_x86.S
@@ -1431,10 +1431,6 @@
 END_FUNCTION art_quick_instance_of
 
 DEFINE_FUNCTION art_quick_check_instance_of
-    // Type check using the bit string passes null as the target class. In that case just throw.
-    testl %ecx, %ecx
-    jz .Lthrow_class_cast_exception_for_bitstring_check
-
     PUSH eax                              // alignment padding
     PUSH ecx                              // pass arg2 - checked class
     PUSH eax                              // pass arg1 - obj
@@ -1452,7 +1448,6 @@
     addl LITERAL(4), %esp
     CFI_ADJUST_CFA_OFFSET(-4)
 
-.Lthrow_class_cast_exception_for_bitstring_check:
     SETUP_SAVE_ALL_CALLEE_SAVES_FRAME ebx, ebx // save all registers as basis for long jump context
     // Outgoing argument set up
     PUSH eax                              // alignment padding
diff --git a/runtime/arch/x86_64/quick_entrypoints_x86_64.S b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
index 81ad780..781ade9 100644
--- a/runtime/arch/x86_64/quick_entrypoints_x86_64.S
+++ b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
@@ -1402,10 +1402,6 @@
 END_FUNCTION art_quick_unlock_object_no_inline
 
 DEFINE_FUNCTION art_quick_check_instance_of
-    // Type check using the bit string passes null as the target class. In that case just throw.
-    testl %esi, %esi
-    jz .Lthrow_class_cast_exception_for_bitstring_check
-
     // We could check the super classes here but that is usually already checked in the caller.
     PUSH rdi                          // Save args for exc
     PUSH rsi
@@ -1429,7 +1425,6 @@
     POP rsi                           // Pop arguments
     POP rdi
 
-.Lthrow_class_cast_exception_for_bitstring_check:
     SETUP_SAVE_ALL_CALLEE_SAVES_FRAME // save all registers as basis for long jump context
     mov %gs:THREAD_SELF_OFFSET, %rdx  // pass Thread::Current()
     call SYMBOL(artThrowClassCastExceptionForObject)  // (Object* src, Class* dest, Thread*)
diff --git a/runtime/base/arena_allocator.cc b/runtime/base/arena_allocator.cc
index 0fcf394..e87f631 100644
--- a/runtime/base/arena_allocator.cc
+++ b/runtime/base/arena_allocator.cc
@@ -56,7 +56,6 @@
   "CtorFenceIns ",
   "InvokeInputs ",
   "PhiInputs    ",
-  "TypeCheckIns ",
   "LoopInfo     ",
   "LIBackEdges  ",
   "TryCatchInf  ",
diff --git a/runtime/base/arena_allocator.h b/runtime/base/arena_allocator.h
index 5f3fc02a..beaba67 100644
--- a/runtime/base/arena_allocator.h
+++ b/runtime/base/arena_allocator.h
@@ -62,7 +62,6 @@
   kArenaAllocConstructorFenceInputs,
   kArenaAllocInvokeInputs,
   kArenaAllocPhiInputs,
-  kArenaAllocTypeCheckInputs,
   kArenaAllocLoopInfo,
   kArenaAllocLoopInfoBackEdges,
   kArenaAllocTryCatchInfo,
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index e7ee9f2..af45a69 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -4482,14 +4482,6 @@
 
   Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(temp_klass, klass);
 
-  // SubtypeCheckInfo::Initialized must happen-before any new-instance for that type.
-  // See also ClassLinker::EnsureInitialized().
-  {
-    MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
-    SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(klass.Get());
-    // TODO: Avoid taking subtype_check_lock_ if SubtypeCheck for j.l.r.Proxy is already assigned.
-  }
-
   {
     // Lock on klass is released. Lock new class object.
     ObjectLock<mirror::Class> initialization_lock(self, klass);
diff --git a/runtime/dex/art_dex_file_loader.cc b/runtime/dex/art_dex_file_loader.cc
index dee736e..08cf30d 100644
--- a/runtime/dex/art_dex_file_loader.cc
+++ b/runtime/dex/art_dex_file_loader.cc
@@ -154,6 +154,8 @@
   ScopedTrace trace(std::string("Open dex file from RAM ") + location);
   return OpenCommon(base,
                     size,
+                    /*data_base*/ nullptr,
+                    /*data_size*/ 0u,
                     location,
                     location_checksum,
                     oat_dex_file,
@@ -182,6 +184,8 @@
 
   std::unique_ptr<DexFile> dex_file = OpenCommon(map->Begin(),
                                                  map->Size(),
+                                                 /*data_base*/ nullptr,
+                                                 /*data_size*/ 0u,
                                                  location,
                                                  location_checksum,
                                                  kNoOatDexFile,
@@ -303,6 +307,8 @@
 
   std::unique_ptr<DexFile> dex_file = OpenCommon(map->Begin(),
                                                  map->Size(),
+                                                 /*data_base*/ nullptr,
+                                                 /*data_size*/ 0u,
                                                  location,
                                                  dex_header->checksum_,
                                                  kNoOatDexFile,
@@ -370,6 +376,8 @@
   VerifyResult verify_result;
   std::unique_ptr<DexFile> dex_file = OpenCommon(map->Begin(),
                                                  map->Size(),
+                                                 /*data_base*/ nullptr,
+                                                 /*data_size*/ 0u,
                                                  location,
                                                  zip_entry->GetCrc32(),
                                                  kNoOatDexFile,
diff --git a/runtime/dex/code_item_accessors-no_art-inl.h b/runtime/dex/code_item_accessors-no_art-inl.h
index 6a99009..a243a4a 100644
--- a/runtime/dex/code_item_accessors-no_art-inl.h
+++ b/runtime/dex/code_item_accessors-no_art-inl.h
@@ -50,7 +50,7 @@
 inline void CodeItemInstructionAccessor::Init(const DexFile& dex_file,
                                               const DexFile::CodeItem* code_item) {
   if (code_item != nullptr) {
-    DCHECK(dex_file.HasAddress(code_item));
+    DCHECK(dex_file.IsInDataSection(code_item));
     if (dex_file.IsCompactDexFile()) {
       Init(down_cast<const CompactDexFile::CodeItem&>(*code_item));
     } else {
diff --git a/runtime/dex/code_item_accessors_test.cc b/runtime/dex/code_item_accessors_test.cc
index 3380be8..8e2548b 100644
--- a/runtime/dex/code_item_accessors_test.cc
+++ b/runtime/dex/code_item_accessors_test.cc
@@ -39,8 +39,12 @@
                            &error_msg));
   CHECK(map != nullptr) << error_msg;
   if (compact_dex) {
-    CompactDexFile::WriteMagic(map->Begin());
-    CompactDexFile::WriteCurrentVersion(map->Begin());
+    CompactDexFile::Header* header =
+        const_cast<CompactDexFile::Header*>(CompactDexFile::Header::At(map->Begin()));
+    CompactDexFile::WriteMagic(header->magic_);
+    CompactDexFile::WriteCurrentVersion(header->magic_);
+    header->data_off_ = 0;
+    header->data_size_ = map->Size();
   } else {
     StandardDexFile::WriteMagic(map->Begin());
     StandardDexFile::WriteCurrentVersion(map->Begin());
diff --git a/runtime/dex/compact_dex_file.cc b/runtime/dex/compact_dex_file.cc
index ff193ff..37f5d00 100644
--- a/runtime/dex/compact_dex_file.cc
+++ b/runtime/dex/compact_dex_file.cc
@@ -56,27 +56,52 @@
 }
 
 uint32_t CompactDexFile::GetCodeItemSize(const DexFile::CodeItem& item) const {
-  // TODO: Clean up this temporary code duplication with StandardDexFile. Eventually the
-  // implementations will differ.
-  DCHECK(HasAddress(&item));
+  DCHECK(IsInDataSection(&item));
   return reinterpret_cast<uintptr_t>(CodeItemDataAccessor(*this, &item).CodeItemDataEnd()) -
       reinterpret_cast<uintptr_t>(&item);
 }
 
+uint32_t CompactDexFile::CalculateChecksum(const uint8_t* base_begin,
+                                           size_t base_size,
+                                           const uint8_t* data_begin,
+                                           size_t data_size) {
+  Header temp_header(*Header::At(base_begin));
+  // Zero out fields that are not included in the sum.
+  temp_header.checksum_ = 0u;
+  temp_header.data_off_ = 0u;
+  temp_header.data_size_ = 0u;
+  uint32_t checksum = ChecksumMemoryRange(reinterpret_cast<const uint8_t*>(&temp_header),
+                                          sizeof(temp_header));
+  // Exclude the header since we already computed its checksum.
+  checksum = (checksum * 31) ^ ChecksumMemoryRange(base_begin + sizeof(temp_header),
+                                                   base_size - sizeof(temp_header));
+  checksum = (checksum * 31) ^ ChecksumMemoryRange(data_begin, data_size);
+  return checksum;
+}
+
+uint32_t CompactDexFile::CalculateChecksum() const {
+  return CalculateChecksum(Begin(), Size(), DataBegin(), DataSize());
+}
+
 CompactDexFile::CompactDexFile(const uint8_t* base,
                                size_t size,
+                               const uint8_t* data_begin,
+                               size_t data_size,
                                const std::string& location,
                                uint32_t location_checksum,
                                const OatDexFile* oat_dex_file,
                                DexFileContainer* container)
     : DexFile(base,
               size,
+              data_begin,
+              data_size,
               location,
               location_checksum,
               oat_dex_file,
               container,
               /*is_compact_dex*/ true),
-      debug_info_offsets_(Begin() + GetHeader().debug_info_offsets_pos_,
+      debug_info_offsets_(DataBegin() + GetHeader().debug_info_offsets_pos_,
                           GetHeader().debug_info_base_,
                           GetHeader().debug_info_offsets_table_offset_) {}
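CalculateChecksum above zeroes the checksum and data_off_/data_size_ header fields, then chains adler32 sums over the header, the rest of the main section, and the data section. A minimal sketch of just the chaining step, assuming zlib's adler32 (as ChecksumMemoryRange does later in this patch); names are illustrative:

#include <zlib.h>

#include <cstddef>
#include <cstdint>

static uint32_t ChecksumRange(const uint8_t* begin, size_t size) {
  return static_cast<uint32_t>(adler32(adler32(0L, Z_NULL, 0), begin, static_cast<uInt>(size)));
}

// Combine the checksums of two byte ranges the same way as above: multiply by a
// small prime and xor in the next range's sum so that section order matters.
static uint32_t CombineChecksums(const uint8_t* main_begin, size_t main_size,
                                 const uint8_t* data_begin, size_t data_size) {
  uint32_t checksum = ChecksumRange(main_begin, main_size);
  checksum = (checksum * 31) ^ ChecksumRange(data_begin, data_size);
  return checksum;
}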
 
diff --git a/runtime/dex/compact_dex_file.h b/runtime/dex/compact_dex_file.h
index 8dad84d..1ecff04 100644
--- a/runtime/dex/compact_dex_file.h
+++ b/runtime/dex/compact_dex_file.h
@@ -35,10 +35,22 @@
 
   class Header : public DexFile::Header {
    public:
+    static const Header* At(const void* at) {
+      return reinterpret_cast<const Header*>(at);
+    }
+
     uint32_t GetFeatureFlags() const {
       return feature_flags_;
     }
 
+    uint32_t GetDataOffset() const {
+      return data_off_;
+    }
+
+    uint32_t GetDataSize() const {
+      return data_size_;
+    }
+
    private:
     uint32_t feature_flags_ = 0u;
 
@@ -245,9 +257,17 @@
     return debug_info_offsets_.GetDebugInfoOffset(dex_method_index);
   }
 
+  static uint32_t CalculateChecksum(const uint8_t* base_begin,
+                                    size_t base_size,
+                                    const uint8_t* data_begin,
+                                    size_t data_size);
+  virtual uint32_t CalculateChecksum() const OVERRIDE;
+
  private:
   CompactDexFile(const uint8_t* base,
                  size_t size,
+                 const uint8_t* data_begin,
+                 size_t data_size,
                  const std::string& location,
                  uint32_t location_checksum,
                  const OatDexFile* oat_dex_file,
diff --git a/runtime/dex/dex_file-inl.h b/runtime/dex/dex_file-inl.h
index 9b14514..aa53daa 100644
--- a/runtime/dex/dex_file-inl.h
+++ b/runtime/dex/dex_file-inl.h
@@ -29,14 +29,14 @@
 namespace art {
 
 inline int32_t DexFile::GetStringLength(const StringId& string_id) const {
-  const uint8_t* ptr = begin_ + string_id.string_data_off_;
+  const uint8_t* ptr = DataBegin() + string_id.string_data_off_;
   return DecodeUnsignedLeb128(&ptr);
 }
 
 inline const char* DexFile::GetStringDataAndUtf16Length(const StringId& string_id,
                                                         uint32_t* utf16_length) const {
   DCHECK(utf16_length != nullptr) << GetLocation();
-  const uint8_t* ptr = begin_ + string_id.string_data_off_;
+  const uint8_t* ptr = DataBegin() + string_id.string_data_off_;
   *utf16_length = DecodeUnsignedLeb128(&ptr);
   return reinterpret_cast<const char*>(ptr);
 }
diff --git a/runtime/dex/dex_file.cc b/runtime/dex/dex_file.cc
index 16325b8..6ec997c 100644
--- a/runtime/dex/dex_file.cc
+++ b/runtime/dex/dex_file.cc
@@ -50,9 +50,12 @@
 }
 
 uint32_t DexFile::CalculateChecksum(const uint8_t* begin, size_t size) {
-  const uint32_t non_sum = OFFSETOF_MEMBER(DexFile::Header, signature_);
-  const uint8_t* non_sum_ptr = begin + non_sum;
-  return adler32(adler32(0L, Z_NULL, 0), non_sum_ptr, size - non_sum);
+  const uint32_t non_sum_bytes = OFFSETOF_MEMBER(DexFile::Header, signature_);
+  return ChecksumMemoryRange(begin + non_sum_bytes, size - non_sum_bytes);
+}
+
+uint32_t DexFile::ChecksumMemoryRange(const uint8_t* begin, size_t size) {
+  return adler32(adler32(0L, Z_NULL, 0), begin, size);
 }
 
 int DexFile::GetPermissions() const {
@@ -77,6 +80,8 @@
 
 DexFile::DexFile(const uint8_t* base,
                  size_t size,
+                 const uint8_t* data_begin,
+                 size_t data_size,
                  const std::string& location,
                  uint32_t location_checksum,
                  const OatDexFile* oat_dex_file,
@@ -84,6 +89,8 @@
                  bool is_compact_dex)
     : begin_(base),
       size_(size),
+      data_begin_(data_begin),
+      data_size_(data_size),
       location_(location),
       location_checksum_(location_checksum),
       header_(reinterpret_cast<const Header*>(base)),
@@ -149,7 +156,7 @@
 }
 
 void DexFile::InitializeSectionsFromMapList() {
-  const MapList* map_list = reinterpret_cast<const MapList*>(begin_ + header_->map_off_);
+  const MapList* map_list = reinterpret_cast<const MapList*>(DataBegin() + header_->map_off_);
   if (header_->map_off_ == 0 || header_->map_off_ > size_) {
     // Bad offset. The dex file verifier runs after this method and will reject the file.
     return;
@@ -166,10 +173,10 @@
   for (size_t i = 0; i < count; ++i) {
     const MapItem& map_item = map_list->list_[i];
     if (map_item.type_ == kDexTypeMethodHandleItem) {
-      method_handles_ = reinterpret_cast<const MethodHandleItem*>(begin_ + map_item.offset_);
+      method_handles_ = reinterpret_cast<const MethodHandleItem*>(DataBegin() + map_item.offset_);
       num_method_handles_ = map_item.size_;
     } else if (map_item.type_ == kDexTypeCallSiteIdItem) {
-      call_site_ids_ = reinterpret_cast<const CallSiteIdItem*>(begin_ + map_item.offset_);
+      call_site_ids_ = reinterpret_cast<const CallSiteIdItem*>(DataBegin() + map_item.offset_);
       num_call_site_ids_ = map_item.size_;
     }
   }
diff --git a/runtime/dex/dex_file.h b/runtime/dex/dex_file.h
index 511ce31..ef25797 100644
--- a/runtime/dex/dex_file.h
+++ b/runtime/dex/dex_file.h
@@ -646,11 +646,7 @@
   const ClassDef* FindClassDef(dex::TypeIndex type_idx) const;
 
   const TypeList* GetInterfacesList(const ClassDef& class_def) const {
-    if (class_def.interfaces_off_ == 0) {
-      return nullptr;
-    }
-    const uint8_t* addr = begin_ + class_def.interfaces_off_;
-    return reinterpret_cast<const TypeList*>(addr);
+    return DataPointer<TypeList>(class_def.interfaces_off_);
   }
 
   uint32_t NumMethodHandles() const {
@@ -673,17 +669,13 @@
 
   // Returns a pointer to the raw memory mapped class_data_item
   const uint8_t* GetClassData(const ClassDef& class_def) const {
-    return (class_def.class_data_off_ == 0) ? nullptr : begin_ + class_def.class_data_off_;
+    return DataPointer<uint8_t>(class_def.class_data_off_);
   }
 
-  //
+  // Return the code item for a provided offset.
   const CodeItem* GetCodeItem(const uint32_t code_off) const {
-    DCHECK_LT(code_off, size_) << "Code item offset larger then maximum allowed offset";
-    if (code_off == 0) {
-      return nullptr;  // native or abstract method
-    }
-    const uint8_t* addr = begin_ + code_off;
-    return reinterpret_cast<const CodeItem*>(addr);
+    // May be null for native or abstract methods.
+    return DataPointer<CodeItem>(code_off);
   }
 
   const char* GetReturnTypeDescriptor(const ProtoId& proto_id) const;
@@ -728,17 +720,15 @@
   const char* GetShorty(uint32_t proto_idx) const;
 
   const TypeList* GetProtoParameters(const ProtoId& proto_id) const {
-    return (proto_id.parameters_off_ == 0)
-        ? nullptr
-        : reinterpret_cast<const TypeList*>(begin_ + proto_id.parameters_off_);
+    return DataPointer<TypeList>(proto_id.parameters_off_);
   }
 
   const uint8_t* GetEncodedStaticFieldValuesArray(const ClassDef& class_def) const {
-    return (class_def.static_values_off_ == 0) ? 0 : begin_ + class_def.static_values_off_;
+    return DataPointer<uint8_t>(class_def.static_values_off_);
   }
 
   const uint8_t* GetCallSiteEncodedValuesArray(const CallSiteIdItem& call_site_id) const {
-    return begin_ + call_site_id.data_off_;
+    return DataBegin() + call_site_id.data_off_;
   }
 
   static const TryItem* GetTryItems(const DexInstructionIterator& code_item_end, uint32_t offset);
@@ -756,7 +746,9 @@
     // Check that the offset is in bounds.
     // Note that although the specification says that 0 should be used if there
     // is no debug information, some applications incorrectly use 0xFFFFFFFF.
-    return (debug_info_off == 0 || debug_info_off >= size_) ? nullptr : begin_ + debug_info_off;
+    return (debug_info_off == 0 || debug_info_off >= data_size_)
+        ? nullptr
+        : DataBegin() + debug_info_off;
   }
 
   struct PositionInfo {
@@ -787,21 +779,17 @@
   static bool LineNumForPcCb(void* context, const PositionInfo& entry);
 
   const AnnotationsDirectoryItem* GetAnnotationsDirectory(const ClassDef& class_def) const {
-    return (class_def.annotations_off_ == 0)
-        ? nullptr
-        : reinterpret_cast<const AnnotationsDirectoryItem*>(begin_ + class_def.annotations_off_);
+    return DataPointer<AnnotationsDirectoryItem>(class_def.annotations_off_);
   }
 
   const AnnotationSetItem* GetClassAnnotationSet(const AnnotationsDirectoryItem* anno_dir) const {
-    return (anno_dir->class_annotations_off_ == 0)
-        ? nullptr
-        : reinterpret_cast<const AnnotationSetItem*>(begin_ + anno_dir->class_annotations_off_);
+    return DataPointer<AnnotationSetItem>(anno_dir->class_annotations_off_);
   }
 
   const FieldAnnotationsItem* GetFieldAnnotations(const AnnotationsDirectoryItem* anno_dir) const {
     return (anno_dir->fields_size_ == 0)
         ? nullptr
         : reinterpret_cast<const FieldAnnotationsItem*>(&anno_dir[1]);
   }
 
   const MethodAnnotationsItem* GetMethodAnnotations(const AnnotationsDirectoryItem* anno_dir)
@@ -828,33 +816,21 @@
   }
 
   const AnnotationSetItem* GetFieldAnnotationSetItem(const FieldAnnotationsItem& anno_item) const {
-    uint32_t offset = anno_item.annotations_off_;
-    return (offset == 0)
-        ? nullptr
-        : reinterpret_cast<const AnnotationSetItem*>(begin_ + offset);
+    return DataPointer<AnnotationSetItem>(anno_item.annotations_off_);
   }
 
   const AnnotationSetItem* GetMethodAnnotationSetItem(const MethodAnnotationsItem& anno_item)
       const {
-    uint32_t offset = anno_item.annotations_off_;
-    return (offset == 0)
-        ? nullptr
-        : reinterpret_cast<const AnnotationSetItem*>(begin_ + offset);
+    return DataPointer<AnnotationSetItem>(anno_item.annotations_off_);
   }
 
   const AnnotationSetRefList* GetParameterAnnotationSetRefList(
       const ParameterAnnotationsItem* anno_item) const {
-    uint32_t offset = anno_item->annotations_off_;
-    return (offset == 0)
-        ? nullptr
-        : reinterpret_cast<const AnnotationSetRefList*>(begin_ + offset);
+    return DataPointer<AnnotationSetRefList>(anno_item->annotations_off_);
   }
 
   ALWAYS_INLINE const AnnotationItem* GetAnnotationItemAtOffset(uint32_t offset) const {
-    DCHECK_LE(offset, Size());
-    return (offset == 0)
-        ? nullptr
-        : reinterpret_cast<const AnnotationItem*>(begin_ + offset);
+    return DataPointer<AnnotationItem>(offset);
   }
 
   const AnnotationItem* GetAnnotationItem(const AnnotationSetItem* set_item, uint32_t index) const {
@@ -863,10 +839,7 @@
   }
 
   const AnnotationSetItem* GetSetRefItemItem(const AnnotationSetRefItem* anno_item) const {
-    uint32_t offset = anno_item->annotations_off_;
-    return (offset == 0)
-        ? nullptr
-        : reinterpret_cast<const AnnotationSetItem*>(begin_ + offset);
+    return DataPointer<AnnotationSetItem>(anno_item->annotations_off_);
   }
 
   // Debug info opcodes and constants
@@ -955,6 +928,20 @@
     return size_;
   }
 
+  const uint8_t* DataBegin() const {
+    return data_begin_;
+  }
+
+  size_t DataSize() const {
+    return data_size_;
+  }
+
+  template <typename T>
+  const T* DataPointer(size_t offset) const {
+    DCHECK_LT(offset, DataSize()) << "Offset past end of data section";
+    return (offset != 0u) ? reinterpret_cast<const T*>(DataBegin() + offset) : nullptr;
+  }
+
   const OatDexFile* GetOatDexFile() const {
     return oat_dex_file_;
   }
@@ -964,6 +951,11 @@
     oat_dex_file_ = oat_dex_file;
   }
 
+  // Returns the raw MapList of the dex file, located in the data section.
+  const DexFile::MapList* GetMapList() const {
+    return reinterpret_cast<const DexFile::MapList*>(DataBegin() + header_->map_off_);
+  }
+
   // Utility methods for reading integral values from a buffer.
   static int32_t ReadSignedInt(const uint8_t* ptr, int zwidth);
   static uint32_t ReadUnsignedInt(const uint8_t* ptr, int zwidth, bool fill_on_right);
@@ -971,8 +963,9 @@
   static uint64_t ReadUnsignedLong(const uint8_t* ptr, int zwidth, bool fill_on_right);
 
   // Recalculates the checksum of the dex file. Does not use the current value in the header.
-  uint32_t CalculateChecksum() const;
+  virtual uint32_t CalculateChecksum() const;
   static uint32_t CalculateChecksum(const uint8_t* begin, size_t size);
+  static uint32_t ChecksumMemoryRange(const uint8_t* begin, size_t size);
 
   // Returns a human-readable form of the method at an index.
   std::string PrettyMethod(uint32_t method_idx, bool with_signature = true) const;
@@ -991,10 +984,14 @@
   ALWAYS_INLINE const StandardDexFile* AsStandardDexFile() const;
   ALWAYS_INLINE const CompactDexFile* AsCompactDexFile() const;
 
-  bool HasAddress(const void* addr) const {
+  bool IsInMainSection(const void* addr) const {
     return Begin() <= addr && addr < Begin() + Size();
   }
 
+  bool IsInDataSection(const void* addr) const {
+    return DataBegin() <= addr && addr < DataBegin() + DataSize();
+  }
+
   DexFileContainer* GetContainer() const {
     return container_.get();
   }
@@ -1005,6 +1002,8 @@
 
   DexFile(const uint8_t* base,
           size_t size,
+          const uint8_t* data_begin,
+          size_t data_size,
           const std::string& location,
           uint32_t location_checksum,
           const OatDexFile* oat_dex_file,
@@ -1026,6 +1025,12 @@
   // The size of the underlying memory allocation in bytes.
   const size_t size_;
 
+  // The base address of the data section (same as Begin() for standard dex).
+  const uint8_t* const data_begin_;
+
+  // The size of the data section.
+  const size_t data_size_;
+
   // Typically the dex file name when available, alternatively some identifying string.
   //
   // The ClassLinker will use this to match DexFiles the boot class
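The DataPointer<T> helper added to DexFile above centralizes the convention that an offset of zero means "absent" and that all data offsets resolve against DataBegin(). A free-function sketch of the same idea, separate from the DexFile class; names are illustrative:

#include <cassert>
#include <cstddef>
#include <cstdint>

// Resolve an offset against a data section, treating offset 0 as "not present".
template <typename T>
static const T* ResolveDataPointer(const uint8_t* data_begin, size_t data_size, size_t offset) {
  assert(offset < data_size && "Offset past end of data section");
  return (offset != 0u) ? reinterpret_cast<const T*>(data_begin + offset) : nullptr;
}

With that in place, accessors such as GetInterfacesList() or GetCodeItem() collapse to a single call and the null handling lives in one spot.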
diff --git a/runtime/dex/dex_file_loader.cc b/runtime/dex/dex_file_loader.cc
index ccad19f..7dde0a4 100644
--- a/runtime/dex/dex_file_loader.cc
+++ b/runtime/dex/dex_file_loader.cc
@@ -222,6 +222,33 @@
                                                    std::string* error_msg) const {
   return OpenCommon(base,
                     size,
+                    /*data_base*/ base,
+                    /*data_size*/ size,
+                    location,
+                    location_checksum,
+                    oat_dex_file,
+                    verify,
+                    verify_checksum,
+                    error_msg,
+                    /*container*/ nullptr,
+                    /*verify_result*/ nullptr);
+}
+
+std::unique_ptr<const DexFile> DexFileLoader::OpenWithDataSection(
+    const uint8_t* base,
+    size_t size,
+    const uint8_t* data_base,
+    size_t data_size,
+    const std::string& location,
+    uint32_t location_checksum,
+    const OatDexFile* oat_dex_file,
+    bool verify,
+    bool verify_checksum,
+    std::string* error_msg) const {
+  return OpenCommon(base,
+                    size,
+                    data_base,
+                    data_size,
                     location,
                     location_checksum,
                     oat_dex_file,
@@ -278,6 +305,8 @@
 
 std::unique_ptr<DexFile> DexFileLoader::OpenCommon(const uint8_t* base,
                                                    size_t size,
+                                                   const uint8_t* data_base,
+                                                   size_t data_size,
                                                    const std::string& location,
                                                    uint32_t location_checksum,
                                                    const OatDexFile* oat_dex_file,
@@ -291,11 +320,32 @@
   }
   std::unique_ptr<DexFile> dex_file;
   if (StandardDexFile::IsMagicValid(base)) {
-    dex_file.reset(
-        new StandardDexFile(base, size, location, location_checksum, oat_dex_file, container));
+    if (data_size != 0) {
+      CHECK_EQ(base, data_base) << "Unsupported for standard dex";
+    }
+    dex_file.reset(new StandardDexFile(base,
+                                       size,
+                                       location,
+                                       location_checksum,
+                                       oat_dex_file,
+                                       container));
   } else if (CompactDexFile::IsMagicValid(base)) {
-    dex_file.reset(
-        new CompactDexFile(base, size, location, location_checksum, oat_dex_file, container));
+    if (data_base == nullptr) {
+      // TODO: Is there a clean way to support both an explicit data section and reading the one
+      // from the header?
+      CHECK_EQ(data_size, 0u);
+      const CompactDexFile::Header* const header = CompactDexFile::Header::At(base);
+      data_base = base + header->data_off_;
+      data_size = header->data_size_;
+    }
+    dex_file.reset(new CompactDexFile(base,
+                                      size,
+                                      data_base,
+                                      data_size,
+                                      location,
+                                      location_checksum,
+                                      oat_dex_file,
+                                      container));
   }
   if (dex_file == nullptr) {
     *error_msg = StringPrintf("Failed to open dex file '%s' from memory: %s", location.c_str(),
@@ -353,6 +403,8 @@
   VerifyResult verify_result;
   std::unique_ptr<const DexFile> dex_file = OpenCommon(map.data(),
                                                        map.size(),
+                                                       /*data_base*/ nullptr,
+                                                       /*data_size*/ 0u,
                                                        location,
                                                        zip_entry->GetCrc32(),
                                                        /*oat_dex_file*/ nullptr,
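
The compact dex branch above falls back to the header when no explicit data section is supplied: data_base becomes base + data_off_ and data_size comes from data_size_. Below is a minimal standalone sketch of that decision; CdexHeaderStub, ResolveDataSection and the two fields are simplified stand-ins for illustration, not the real CompactDexFile::Header or loader API.

#include <cstddef>
#include <cstdint>
#include <cstring>
#include <iostream>

// Simplified stand-in for the two compact dex header fields used by the
// fallback; the real CompactDexFile::Header has many more fields.
struct CdexHeaderStub {
  uint32_t data_off_;   // offset of the shared data section, relative to base
  uint32_t data_size_;  // size of the shared data section in bytes
};

// If the caller did not pass an explicit data section, derive it from the
// header at `base`; otherwise leave the caller-provided values untouched.
void ResolveDataSection(const uint8_t* base,
                        const uint8_t*& data_base,
                        size_t& data_size) {
  if (data_base == nullptr) {
    CdexHeaderStub header;
    std::memcpy(&header, base, sizeof(header));  // stand-in for Header::At(base)
    data_base = base + header.data_off_;
    data_size = header.data_size_;
  }
}

int main() {
  uint8_t file[64] = {};
  const CdexHeaderStub header = {/*data_off_=*/32, /*data_size_=*/16};
  std::memcpy(file, &header, sizeof(header));

  const uint8_t* data_base = nullptr;  // no explicit data section supplied
  size_t data_size = 0;
  ResolveDataSection(file, data_base, data_size);
  std::cout << "data offset: " << (data_base - file)
            << ", data size: " << data_size << "\n";  // 32, 16
  return 0;
}

For standard dex, the change above instead CHECKs that any supplied data section is identical to the file itself.
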
diff --git a/runtime/dex/dex_file_loader.h b/runtime/dex/dex_file_loader.h
index 05a51d0..508397c 100644
--- a/runtime/dex/dex_file_loader.h
+++ b/runtime/dex/dex_file_loader.h
@@ -120,6 +120,20 @@
                                               bool verify_checksum,
                                               std::string* error_msg) const;
 
+  // Opens a dex file that has a separate data section.
+  virtual std::unique_ptr<const DexFile> OpenWithDataSection(
+      const uint8_t* base,
+      size_t size,
+      const uint8_t* data_base,
+      size_t data_size,
+      const std::string& location,
+      uint32_t location_checksum,
+      const OatDexFile* oat_dex_file,
+      bool verify,
+      bool verify_checksum,
+      std::string* error_msg) const;
+
+
   // Opens all .dex files found in the memory map, guessing the container format based on file
   // extension.
   virtual bool OpenAll(const uint8_t* base,
@@ -148,6 +162,8 @@
 
   static std::unique_ptr<DexFile> OpenCommon(const uint8_t* base,
                                              size_t size,
+                                             const uint8_t* data_base,
+                                             size_t data_size,
                                              const std::string& location,
                                              uint32_t location_checksum,
                                              const OatDexFile* oat_dex_file,
diff --git a/runtime/dex/standard_dex_file.cc b/runtime/dex/standard_dex_file.cc
index 52fdff3..024f73b 100644
--- a/runtime/dex/standard_dex_file.cc
+++ b/runtime/dex/standard_dex_file.cc
@@ -73,10 +73,7 @@
 }
 
 uint32_t StandardDexFile::GetCodeItemSize(const DexFile::CodeItem& item) const {
-  DCHECK(HasAddress(&item));
-  // TODO: Clean up this temporary code duplication with StandardDexFile. Eventually the
-  // implementations will differ.
-  DCHECK(HasAddress(&item));
+  DCHECK(IsInDataSection(&item));
   return reinterpret_cast<uintptr_t>(CodeItemDataAccessor(*this, &item).CodeItemDataEnd()) -
       reinterpret_cast<uintptr_t>(&item);
 }
diff --git a/runtime/dex/standard_dex_file.h b/runtime/dex/standard_dex_file.h
index 6437def..94ef1f2 100644
--- a/runtime/dex/standard_dex_file.h
+++ b/runtime/dex/standard_dex_file.h
@@ -92,6 +92,8 @@
                   DexFileContainer* container)
       : DexFile(base,
                 size,
+                /*data_begin*/ base,
+                /*data_size*/ size,
                 location,
                 location_checksum,
                 oat_dex_file,
diff --git a/runtime/entrypoints/quick/quick_throw_entrypoints.cc b/runtime/entrypoints/quick/quick_throw_entrypoints.cc
index 4b26bee..565b4ed 100644
--- a/runtime/entrypoints/quick/quick_throw_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_throw_entrypoints.cc
@@ -15,11 +15,8 @@
  */
 
 #include "callee_save_frame.h"
-#include "dex/code_item_accessors-inl.h"
-#include "dex/dex_instruction-inl.h"
 #include "common_throws.h"
 #include "mirror/object-inl.h"
-#include "nth_caller_visitor.h"
 #include "thread.h"
 #include "well_known_classes.h"
 
@@ -114,21 +111,6 @@
                                                      Thread* self)
     REQUIRES_SHARED(Locks::mutator_lock_) {
   ScopedQuickEntrypointChecks sqec(self);
-  if (dest_type == nullptr) {
-    // Find the target class for check cast using the bitstring check (dest_type == null).
-    NthCallerVisitor visitor(self, 0u);
-    visitor.WalkStack();
-    DCHECK(visitor.caller != nullptr);
-    uint32_t dex_pc = visitor.GetDexPc();
-    CodeItemDataAccessor accessor(visitor.caller);
-    const Instruction& check_cast = accessor.InstructionAt(dex_pc);
-    DCHECK_EQ(check_cast.Opcode(), Instruction::CHECK_CAST);
-    dex::TypeIndex type_index(check_cast.VRegB_21c());
-    ClassLinker* linker = Runtime::Current()->GetClassLinker();
-    dest_type = linker->LookupResolvedType(type_index, visitor.caller).Ptr();
-    CHECK(dest_type != nullptr) << "Target class should have been previously resolved: "
-        << visitor.caller->GetDexFile()->PrettyType(type_index);
-  }
   DCHECK(!dest_type->IsAssignableFrom(src_type));
   ThrowClassCastException(dest_type, src_type);
   self->QuickDeliverException();
diff --git a/runtime/gc/heap.h b/runtime/gc/heap.h
index 57d3d50..faa6195 100644
--- a/runtime/gc/heap.h
+++ b/runtime/gc/heap.h
@@ -1293,10 +1293,6 @@
   // Parallel GC data structures.
   std::unique_ptr<ThreadPool> thread_pool_;
 
-  // Estimated allocation rate (bytes / second). Computed between the time of the last GC cycle
-  // and the start of the current one.
-  uint64_t allocation_rate_;
-
   // For a GC cycle, bitmaps that are set corresponding to the live and marked objects.
   std::unique_ptr<accounting::HeapBitmap> live_bitmap_ GUARDED_BY(Locks::heap_bitmap_lock_);
   std::unique_ptr<accounting::HeapBitmap> mark_bitmap_ GUARDED_BY(Locks::heap_bitmap_lock_);
diff --git a/runtime/oat_file.cc b/runtime/oat_file.cc
index 9fd9905..307f7b9 100644
--- a/runtime/oat_file.cc
+++ b/runtime/oat_file.cc
@@ -1653,7 +1653,12 @@
     if (lookup_table_data_ + TypeLookupTable::RawDataLength(num_class_defs) > GetOatFile()->End()) {
       LOG(WARNING) << "found truncated lookup table in " << dex_file_location_;
     } else {
-      lookup_table_ = TypeLookupTable::Open(dex_file_pointer_, lookup_table_data_, num_class_defs);
+      const uint8_t* dex_data = dex_file_pointer_;
+      // TODO: Clean this up by creating the type lookup table after the dex file has been created?
+      if (CompactDexFile::IsMagicValid(dex_header->magic_)) {
+        dex_data += dex_header->data_off_;
+      }
+      lookup_table_ = TypeLookupTable::Open(dex_data, lookup_table_data_, num_class_defs);
     }
   }
 }
@@ -1733,9 +1738,17 @@
                                                            size_t hash) {
   const OatFile::OatDexFile* oat_dex_file = dex_file.GetOatDexFile();
   DCHECK_EQ(ComputeModifiedUtf8Hash(descriptor), hash);
+  bool used_lookup_table = false;
+  const DexFile::ClassDef* lookup_table_classdef = nullptr;
   if (LIKELY((oat_dex_file != nullptr) && (oat_dex_file->GetTypeLookupTable() != nullptr))) {
+    used_lookup_table = true;
     const uint32_t class_def_idx = oat_dex_file->GetTypeLookupTable()->Lookup(descriptor, hash);
-    return (class_def_idx != dex::kDexNoIndex) ? &dex_file.GetClassDef(class_def_idx) : nullptr;
+    lookup_table_classdef = (class_def_idx != dex::kDexNoIndex)
+        ? &dex_file.GetClassDef(class_def_idx)
+        : nullptr;
+    if (!kIsDebugBuild) {
+      return lookup_table_classdef;
+    }
   }
   // Fast path for rare no class defs case.
   const uint32_t num_class_defs = dex_file.NumClassDefs();
@@ -1745,7 +1758,11 @@
   const DexFile::TypeId* type_id = dex_file.FindTypeId(descriptor);
   if (type_id != nullptr) {
     dex::TypeIndex type_idx = dex_file.GetIndexForTypeId(*type_id);
-    return dex_file.FindClassDef(type_idx);
+    const DexFile::ClassDef* found_class_def = dex_file.FindClassDef(type_idx);
+    if (kIsDebugBuild && used_lookup_table) {
+      DCHECK_EQ(found_class_def, lookup_table_classdef);
+    }
+    return found_class_def;
   }
   return nullptr;
 }
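
The FindClassDef change above keeps the type lookup table as the fast path in release builds, while debug builds fall through to the descriptor search and DCHECK that both answers agree. The sketch below shows the same "fast path, cross-checked against the slow path in debug builds" pattern with a plain hash map and vector standing in for the TypeLookupTable and dex class defs; FindIndex and kIsDebugBuild here are illustrative stand-ins, not ART API.

#include <cassert>
#include <iostream>
#include <string>
#include <unordered_map>
#include <vector>

// Stand-in for ART's compile-time kIsDebugBuild constant.
constexpr bool kIsDebugBuild = true;

// Fast path: hash table lookup (the "type lookup table"). Slow path: linear
// scan. In debug builds the fast result is kept and cross-checked against the
// slow result instead of being returned immediately.
int FindIndex(const std::unordered_map<std::string, int>* table,
              const std::vector<std::string>& descriptors,
              const std::string& descriptor) {
  bool used_table = false;
  int table_result = -1;
  if (table != nullptr) {
    used_table = true;
    auto it = table->find(descriptor);
    table_result = (it != table->end()) ? it->second : -1;
    if (!kIsDebugBuild) {
      return table_result;  // release builds trust the fast path
    }
  }
  int scanned = -1;  // slow path, also taken when no table is available
  for (size_t i = 0; i < descriptors.size(); ++i) {
    if (descriptors[i] == descriptor) {
      scanned = static_cast<int>(i);
      break;
    }
  }
  if (kIsDebugBuild && used_table) {
    assert(scanned == table_result);  // both paths must agree
  }
  return scanned;
}

int main() {
  const std::vector<std::string> descriptors = {"LFoo;", "LBar;", "LBaz;"};
  const std::unordered_map<std::string, int> table = {
      {"LFoo;", 0}, {"LBar;", 1}, {"LBaz;", 2}};
  std::cout << FindIndex(&table, descriptors, "LBar;") << "\n";      // 1
  std::cout << FindIndex(&table, descriptors, "LMissing;") << "\n";  // -1
  return 0;
}
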
diff --git a/runtime/subtype_check.h b/runtime/subtype_check.h
index 03a6d9c..54d2f00 100644
--- a/runtime/subtype_check.h
+++ b/runtime/subtype_check.h
@@ -283,17 +283,6 @@
     return SubtypeCheckInfo::kUninitialized;
   }
 
-  // Retrieve the state of this class's SubtypeCheckInfo.
-  //
-  // Cost: O(Depth(Class)).
-  //
-  // Returns: The precise SubtypeCheckInfo::State.
-  static SubtypeCheckInfo::State GetState(ClassPtr klass)
-      REQUIRES(Locks::subtype_check_lock_)
-      REQUIRES_SHARED(Locks::mutator_lock_) {
-    return GetSubtypeCheckInfo(klass).GetState();
-  }
-
   // Retrieve the path to root bitstring as a plain uintN_t value that is amenable to
   // be used by a fast check "encoded_src & mask_target == encoded_target".
   //
@@ -316,9 +305,8 @@
   static BitString::StorageType GetEncodedPathToRootForTarget(ClassPtr klass)
       REQUIRES(Locks::subtype_check_lock_)
       REQUIRES_SHARED(Locks::mutator_lock_) {
-    SubtypeCheckInfo sci = GetSubtypeCheckInfo(klass);
-    DCHECK_EQ(SubtypeCheckInfo::kAssigned, sci.GetState());
-    return sci.GetEncodedPathToRoot();
+    DCHECK_EQ(SubtypeCheckInfo::kAssigned, GetSubtypeCheckInfo(klass).GetState());
+    return GetSubtypeCheckInfo(klass).GetEncodedPathToRoot();
   }
 
   // Retrieve the path to root bitstring mask as a plain uintN_t value that is amenable to
@@ -330,9 +318,8 @@
   static BitString::StorageType GetEncodedPathToRootMask(ClassPtr klass)
       REQUIRES(Locks::subtype_check_lock_)
       REQUIRES_SHARED(Locks::mutator_lock_) {
-    SubtypeCheckInfo sci = GetSubtypeCheckInfo(klass);
-    DCHECK_EQ(SubtypeCheckInfo::kAssigned, sci.GetState());
-    return sci.GetEncodedPathToRootMask();
+    DCHECK_EQ(SubtypeCheckInfo::kAssigned, GetSubtypeCheckInfo(klass).GetState());
+    return GetSubtypeCheckInfo(klass).GetEncodedPathToRootMask();
   }
 
   // Is the source class a subclass of the target?
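
The doc comments retained above describe the fast subtype test as "encoded_src & mask_target == encoded_target". The toy sketch below exercises that check with hand-picked uint32_t encodings; MakeBitstring and the field widths are made up for illustration and do not match the bit assignment SubtypeCheck actually uses.

#include <cstdint>
#include <iostream>

// Toy path-to-root encoding: the level-1 sibling index lives in the low 12
// bits and the level-2 sibling index in the next 8 bits. Widths are
// illustrative only.
constexpr uint32_t MakeBitstring(uint32_t level1, uint32_t level2) {
  return (level1 & 0xFFFu) | ((level2 & 0xFFu) << 12);
}

// The fast check from the comment: src is a subtype of target iff target's
// assigned path-to-root bits are a prefix of src's.
bool IsSubtypeFast(uint32_t encoded_src,
                   uint32_t encoded_target,
                   uint32_t mask_target) {
  return (encoded_src & mask_target) == encoded_target;
}

int main() {
  const uint32_t level1 = MakeBitstring(3, 0);  // child #3 of the root
  const uint32_t level2 = MakeBitstring(3, 7);  // child #7 of level1
  const uint32_t other  = MakeBitstring(5, 0);  // unrelated level-1 class

  const uint32_t mask_level1 = 0xFFFu;  // only level1's field is assigned
  std::cout << IsSubtypeFast(level2, level1, mask_level1) << "\n";  // 1
  std::cout << IsSubtypeFast(other,  level1, mask_level1) << "\n";  // 0
  return 0;
}
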
diff --git a/runtime/type_lookup_table.cc b/runtime/type_lookup_table.cc
index 6eb3d83..649a4f9 100644
--- a/runtime/type_lookup_table.cc
+++ b/runtime/type_lookup_table.cc
@@ -66,7 +66,7 @@
 }
 
 TypeLookupTable::TypeLookupTable(const DexFile& dex_file, uint8_t* storage)
-    : dex_file_begin_(dex_file.Begin()),
+    : dex_data_begin_(dex_file.DataBegin()),
       raw_data_length_(RawDataLength(dex_file.NumClassDefs())),
       mask_(CalculateMask(dex_file.NumClassDefs())),
       entries_(storage != nullptr ? reinterpret_cast<Entry*>(storage) : new Entry[mask_ + 1]),
@@ -106,7 +106,7 @@
 TypeLookupTable::TypeLookupTable(const uint8_t* dex_file_pointer,
                                  const uint8_t* raw_data,
                                  uint32_t num_class_defs)
-    : dex_file_begin_(dex_file_pointer),
+    : dex_data_begin_(dex_file_pointer),
       raw_data_length_(RawDataLength(num_class_defs)),
       mask_(CalculateMask(num_class_defs)),
       entries_(reinterpret_cast<Entry*>(const_cast<uint8_t*>(raw_data))),
diff --git a/runtime/type_lookup_table.h b/runtime/type_lookup_table.h
index 6a6f47f..50c93ad 100644
--- a/runtime/type_lookup_table.h
+++ b/runtime/type_lookup_table.h
@@ -43,7 +43,7 @@
 
   // Searches for a class_def_idx by class descriptor and its hash.
   // If no matching data is found, the method returns dex::kDexNoIndex.
-  ALWAYS_INLINE uint32_t Lookup(const char* str, uint32_t hash) const {
+  uint32_t Lookup(const char* str, uint32_t hash) const {
     uint32_t pos = hash & GetSizeMask();
     // Thanks to the special insertion algorithm, the element at position pos can be either empty
     // or the start of a bucket.
@@ -127,8 +127,8 @@
                   uint32_t num_class_defs);
 
   bool IsStringsEquals(const char* str, uint32_t str_offset) const {
-    const uint8_t* ptr = dex_file_begin_ + str_offset;
-    CHECK(dex_file_begin_ != nullptr);
+    const uint8_t* ptr = dex_data_begin_ + str_offset;
+    CHECK(dex_data_begin_ != nullptr);
     // Skip string length.
     DecodeUnsignedLeb128(&ptr);
     return CompareModifiedUtf8ToModifiedUtf8AsUtf16CodePointValues(
@@ -160,7 +160,7 @@
   // Find the last entry in a chain.
   uint32_t FindLastEntryInBucket(uint32_t cur_pos) const;
 
-  const uint8_t* dex_file_begin_;
+  const uint8_t* dex_data_begin_;
   const uint32_t raw_data_length_;
   const uint32_t mask_;
   std::unique_ptr<Entry[]> entries_;
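
Renaming dex_file_begin_ to dex_data_begin_ matters because the string offsets stored in the lookup table entries are resolved against the dex data section, which for compact dex is no longer the start of the file. The sketch below mirrors the offset-resolution side of IsStringsEquals under that assumption: resolve the offset against the data base, skip the ULEB128 length prefix, then compare bytes. It uses plain strcmp and ignores the MUTF-8/UTF-16 code point comparison the real code performs; DecodeUleb128 and StringAtOffsetEquals are illustrative helpers.

#include <cstdint>
#include <cstring>
#include <iostream>
#include <vector>

// Minimal ULEB128 decoder; advances *data past the encoded value.
uint32_t DecodeUleb128(const uint8_t** data) {
  uint32_t result = 0;
  int shift = 0;
  uint8_t byte;
  do {
    byte = *(*data)++;
    result |= static_cast<uint32_t>(byte & 0x7Fu) << shift;
    shift += 7;
  } while ((byte & 0x80u) != 0);
  return result;
}

// Resolve str_offset against the *data* section (not the file start), skip
// the length prefix, then compare the stored bytes with `str`.
bool StringAtOffsetEquals(const uint8_t* data_begin,
                          uint32_t str_offset,
                          const char* str) {
  const uint8_t* ptr = data_begin + str_offset;
  DecodeUleb128(&ptr);  // skip the string length
  return std::strcmp(reinterpret_cast<const char*>(ptr), str) == 0;
}

int main() {
  // Hand-built data section with one string_data_item at offset 4:
  // length prefix 0x03 followed by "Foo\0".
  const std::vector<uint8_t> data = {0, 0, 0, 0, 0x03, 'F', 'o', 'o', '\0'};
  std::cout << StringAtOffsetEquals(data.data(), 4, "Foo") << "\n";  // 1
  std::cout << StringAtOffsetEquals(data.data(), 4, "Bar") << "\n";  // 0
  return 0;
}
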
diff --git a/runtime/vdex_file.cc b/runtime/vdex_file.cc
index cab91df..36ebb17 100644
--- a/runtime/vdex_file.cc
+++ b/runtime/vdex_file.cc
@@ -48,10 +48,12 @@
 
 VdexFile::Header::Header(uint32_t number_of_dex_files,
                          uint32_t dex_size,
+                         uint32_t dex_shared_data_size,
                          uint32_t verifier_deps_size,
                          uint32_t quickening_info_size)
     : number_of_dex_files_(number_of_dex_files),
       dex_size_(dex_size),
+      dex_shared_data_size_(dex_shared_data_size),
       verifier_deps_size_(verifier_deps_size),
       quickening_info_size_(quickening_info_size) {
   memcpy(magic_, kVdexMagic, sizeof(kVdexMagic));
@@ -183,14 +185,17 @@
     // TODO: Supply the location information for a vdex file.
     static constexpr char kVdexLocation[] = "";
     std::string location = DexFileLoader::GetMultiDexLocation(i, kVdexLocation);
-    std::unique_ptr<const DexFile> dex(dex_file_loader.Open(dex_file_start,
-                                                            size,
-                                                            location,
-                                                            GetLocationChecksum(i),
-                                                            nullptr /*oat_dex_file*/,
-                                                            false /*verify*/,
-                                                            false /*verify_checksum*/,
-                                                            error_msg));
+    std::unique_ptr<const DexFile> dex(dex_file_loader.OpenWithDataSection(
+        dex_file_start,
+        size,
+        /*data_base*/ nullptr,
+        /*data_size*/ 0u,
+        location,
+        GetLocationChecksum(i),
+        nullptr /*oat_dex_file*/,
+        false /*verify*/,
+        false /*verify_checksum*/,
+        error_msg));
     if (dex == nullptr) {
       return false;
     }
diff --git a/runtime/vdex_file.h b/runtime/vdex_file.h
index 202380d..b9fd467 100644
--- a/runtime/vdex_file.h
+++ b/runtime/vdex_file.h
@@ -54,6 +54,7 @@
    public:
     Header(uint32_t number_of_dex_files_,
            uint32_t dex_size,
+           uint32_t dex_shared_data_size,
            uint32_t verifier_deps_size,
            uint32_t quickening_info_size);
 
@@ -64,6 +65,7 @@
     bool IsValid() const { return IsMagicValid() && IsVersionValid(); }
 
     uint32_t GetDexSize() const { return dex_size_; }
+    uint32_t GetDexSharedDataSize() const { return dex_shared_data_size_; }
     uint32_t GetVerifierDepsSize() const { return verifier_deps_size_; }
     uint32_t GetQuickeningInfoSize() const { return quickening_info_size_; }
     uint32_t GetNumberOfDexFiles() const { return number_of_dex_files_; }
@@ -72,6 +74,7 @@
       return sizeof(Header) +
              GetSizeOfChecksumsSection() +
              GetDexSize() +
+             GetDexSharedDataSize() +
              GetVerifierDepsSize() +
              GetQuickeningInfoSize();
     }
@@ -84,13 +87,14 @@
 
    private:
     static constexpr uint8_t kVdexMagic[] = { 'v', 'd', 'e', 'x' };
-    // Last update: Use efficient encoding for compact dex code item fields
-    static constexpr uint8_t kVdexVersion[] = { '0', '1', '5', '\0' };
+    // Last update: Separate section for compact dex data.
+    static constexpr uint8_t kVdexVersion[] = { '0', '1', '6', '\0' };
 
     uint8_t magic_[4];
     uint8_t version_[4];
     uint32_t number_of_dex_files_;
     uint32_t dex_size_;
+    uint32_t dex_shared_data_size_;
     uint32_t verifier_deps_size_;
     uint32_t quickening_info_size_;
 
@@ -172,7 +176,8 @@
 
   ArrayRef<const uint8_t> GetVerifierDepsData() const {
     return ArrayRef<const uint8_t>(
-        DexBegin() + GetHeader().GetDexSize(), GetHeader().GetVerifierDepsSize());
+        DexBegin() + GetHeader().GetDexSize() + GetHeader().GetDexSharedDataSize(),
+        GetHeader().GetVerifierDepsSize());
   }
 
   ArrayRef<const uint8_t> GetQuickeningInfo() const {
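
With the new dex_shared_data_size_ field, vdex sections remain laid out back to back, and the verifier deps section now starts after both the dex section and the shared data section, as GetVerifierDepsData above computes. A small sketch of that offset arithmetic follows; VdexSizes and the sample numbers are hypothetical, and the checksum section size is passed in rather than derived from a real header.

#include <cstdint>
#include <iostream>

// Hypothetical mirror of the size fields involved in the layout; the real
// VdexFile::Header keeps these as private uint32_t members.
struct VdexSizes {
  uint32_t header_size;
  uint32_t checksums_size;        // number_of_dex_files * sizeof(checksum)
  uint32_t dex_size;
  uint32_t dex_shared_data_size;  // new in version 016
  uint32_t verifier_deps_size;
  uint32_t quickening_info_size;
};

// Sections are contiguous, so each offset is the running sum of everything
// before it; verifier deps now follow both the dex and shared data sections.
uint32_t VerifierDepsOffset(const VdexSizes& s) {
  return s.header_size + s.checksums_size + s.dex_size + s.dex_shared_data_size;
}

uint32_t TotalSize(const VdexSizes& s) {
  return VerifierDepsOffset(s) + s.verifier_deps_size + s.quickening_info_size;
}

int main() {
  const VdexSizes s = {/*header*/ 28, /*checksums*/ 8, /*dex*/ 4096,
                       /*shared data*/ 512, /*deps*/ 256, /*quickening*/ 128};
  std::cout << "verifier deps at offset " << VerifierDepsOffset(s)
            << ", total size " << TotalSize(s) << "\n";  // 4644, 5028
  return 0;
}
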
diff --git a/test/670-bitstring-type-check/build b/test/670-bitstring-type-check/build
deleted file mode 100644
index 38307f2..0000000
--- a/test/670-bitstring-type-check/build
+++ /dev/null
@@ -1,216 +0,0 @@
-#!/bin/bash
-#
-# Copyright (C) 2018 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Stop if something fails.
-set -e
-
-# Write out the source file.
-
-mkdir src
-cat >src/Main.java <<EOF
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-EOF
-
-for i in {0..8192}; do echo "class Level1Class$i { }" >>src/Main.java; done
-for i in {0..1024}; do echo "class Level2Class$i extends Level1Class0 { }" >>src/Main.java; done
-
-cat >>src/Main.java <<EOF
-class Level3Class0 extends Level2Class0 { }
-class Level4Class0 extends Level3Class0 { }
-class Level5Class0 extends Level4Class0 { }
-class Level6Class0 extends Level5Class0 { }
-class Level7Class0 extends Level6Class0 { }
-class Level8Class0 extends Level7Class0 { }
-class Level9Class0 extends Level8Class0 { }
-
-public class Main {
-  public static void main(String[] args) throws Exception {
-    // 8193 classes at level 1 make sure we shall have an overflow if there are 13 or
-    // less bits for the level 1 character. 1025 classes at level 2 similarly guarantees
-    // an overflow if the number of bits for level 2 character is 10 or less. To test
-    // type checks also for the depth overflow, we provide a hierarchy 9 levels deep.
-
-    // Make sure the bitstrings are initialized.
-    for (int i = 0; i <= 8192; ++i) {
-      Class.forName("Level1Class" + i).newInstance();
-    }
-    for (int i = 0; i <= 1024; ++i) {
-      Class.forName("Level2Class" + i).newInstance();
-    }
-
-    // Note: Using a different class for tests so that verification of Main.main() does
-    // not try to resolve classes used by the tests. This guarantees uninitialized type
-    // check bitstrings when we enter Main.main() and start initializing them above.
-    Helper.testInstanceOf();
-    Helper.testCheckCast();
-  }
-}
-
-class Helper {
-  public static void testInstanceOf() throws Exception {
-    for (int i = 1; i <= 9; ++i) {
-      Object o = createInstance("Level" + i + "Class0");
-      assertTrue(o instanceof Level1Class0);
-      if (o instanceof Level2Class0) {
-        assertFalse(i < 2);
-      } else {
-        assertTrue(i < 2);
-      }
-      if (o instanceof Level3Class0) {
-        assertFalse(i < 3);
-      } else {
-        assertTrue(i < 3);
-      }
-      if (o instanceof Level4Class0) {
-        assertFalse(i < 4);
-      } else {
-        assertTrue(i < 4);
-      }
-      if (o instanceof Level5Class0) {
-        assertFalse(i < 5);
-      } else {
-        assertTrue(i < 5);
-      }
-      if (o instanceof Level6Class0) {
-        assertFalse(i < 6);
-      } else {
-        assertTrue(i < 6);
-      }
-      if (o instanceof Level7Class0) {
-        assertFalse(i < 7);
-      } else {
-        assertTrue(i < 7);
-      }
-      if (o instanceof Level8Class0) {
-        assertFalse(i < 8);
-      } else {
-        assertTrue(i < 8);
-      }
-      if (o instanceof Level9Class0) {
-        assertFalse(i < 9);
-      } else {
-        assertTrue(i < 9);
-      }
-    }
-
-    assertTrue(createInstance("Level1Class8192") instanceof Level1Class8192);
-    assertFalse(createInstance("Level1Class8192") instanceof Level1Class0);
-    assertTrue(createInstance("Level2Class1024") instanceof Level2Class1024);
-    assertTrue(createInstance("Level2Class1024") instanceof Level1Class0);
-    assertFalse(createInstance("Level2Class1024") instanceof Level2Class0);
-  }
-
-  public static void testCheckCast() throws Exception {
-    for (int i = 1; i <= 9; ++i) {
-      Object o = createInstance("Level" + i + "Class0");
-      Level1Class0 l1c0 = (Level1Class0) o;
-      try {
-        Level2Class0 l2c0 = (Level2Class0) o;
-        assertFalse(i < 2);
-      } catch (ClassCastException cce) {
-        assertTrue(i < 2);
-      }
-      try {
-        Level3Class0 l3c0 = (Level3Class0) o;
-        assertFalse(i < 3);
-      } catch (ClassCastException cce) {
-        assertTrue(i < 3);
-      }
-      try {
-        Level4Class0 l4c0 = (Level4Class0) o;
-        assertFalse(i < 4);
-      } catch (ClassCastException cce) {
-        assertTrue(i < 4);
-      }
-      try {
-        Level5Class0 l5c0 = (Level5Class0) o;
-        assertFalse(i < 5);
-      } catch (ClassCastException cce) {
-        assertTrue(i < 5);
-      }
-      try {
-        Level6Class0 l6c0 = (Level6Class0) o;
-        assertFalse(i < 6);
-      } catch (ClassCastException cce) {
-        assertTrue(i < 6);
-      }
-      try {
-        Level7Class0 l7c0 = (Level7Class0) o;
-        assertFalse(i < 7);
-      } catch (ClassCastException cce) {
-        assertTrue(i < 7);
-      }
-      try {
-        Level8Class0 l8c0 = (Level8Class0) o;
-        assertFalse(i < 8);
-      } catch (ClassCastException cce) {
-        assertTrue(i < 8);
-      }
-      try {
-        Level9Class0 l9c0 = (Level9Class0) o;
-        assertFalse(i < 9);
-      } catch (ClassCastException cce) {
-        assertTrue(i < 9);
-      }
-    }
-
-    Level1Class8192 l1c8192 = (Level1Class8192) createInstance("Level1Class8192");
-    try {
-      Level1Class0 l1c0 = (Level1Class0) createInstance("Level1Class8192");
-      throw new AssertionError("Unexpected");
-    } catch (ClassCastException expected) {}
-    Level2Class1024 l2c1024 = (Level2Class1024) createInstance("Level2Class1024");
-    Level1Class0 l1c0 = (Level1Class0) createInstance("Level2Class1024");
-    try {
-      Level2Class0 l2c0 = (Level2Class0) createInstance("Level2Class1024");
-      throw new AssertionError("Unexpected");
-    } catch (ClassCastException expected) {}
-  }
-
-  public static Object createInstance(String className) throws Exception {
-    return Class.forName(className).newInstance();
-  }
-
-  public static void assertTrue(boolean value) throws Exception {
-    if (!value) {
-      throw new AssertionError();
-    }
-  }
-
-  public static void assertFalse(boolean value) throws Exception {
-    if (value) {
-      throw new AssertionError();
-    }
-  }
-}
-EOF
-
-./default-build "$@"
diff --git a/test/670-bitstring-type-check/expected.txt b/test/670-bitstring-type-check/expected.txt
deleted file mode 100644
index e69de29..0000000
--- a/test/670-bitstring-type-check/expected.txt
+++ /dev/null
diff --git a/test/670-bitstring-type-check/info.txt b/test/670-bitstring-type-check/info.txt
deleted file mode 100644
index a34ba86..0000000
--- a/test/670-bitstring-type-check/info.txt
+++ /dev/null
@@ -1 +0,0 @@
-Tests for the bitstring type checks.
diff --git a/tools/prebuilt_libjdwp_art_failures.txt b/tools/prebuilt_libjdwp_art_failures.txt
index 7694a4c..6052f23 100644
--- a/tools/prebuilt_libjdwp_art_failures.txt
+++ b/tools/prebuilt_libjdwp_art_failures.txt
@@ -118,5 +118,11 @@
   result: EXEC_FAILED,
   bug: 69591477,
   name: "org.apache.harmony.jpda.tests.jdwp.VirtualMachine.ExitTest#testExit001"
+},
+{
+  description: "Test is flaky",
+  result: EXEC_FAILED,
+  bug: 70958370,
+  name: "org.apache.harmony.jpda.tests.jdwp.ObjectReference.EnableCollectionTest#testEnableCollection001"
 }
 ]