Revert "Deoptimization-based bce."

This breaks compilation of the core image:

 Error after BCE: art::SSAChecker: Instruction 219 in block 1 does not dominate use 221 in block 1.

This reverts commit e295e6ec5beaea31be5d7d3c996cd8cfa2053129.
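
For reference, the reverted pass grouped the bounds checks of several
constant-index accesses under one deoptimizing guard. A rough sketch of
the intended effect, based on the removed checker test in
test/449-checker-bce/src/Main.java (the post-BCE shape described in the
comments is illustrative, not compiler output):

  static void constantIndexing2(int[] array) {
    array[1] = 1;   // After BCE: covered by a single
    array[2] = 1;   // "if (array.length <= 4) deoptimize" guard,
    array[3] = 1;   // so each per-access BoundsCheck is removed.
    array[4] = 1;
    array[-1] = 1;  // Negative constant index: BoundsCheck is kept.
  }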

Change-Id: Ieeb48797d451836ed506ccb940872f1443942e4e
diff --git a/compiler/oat_test.cc b/compiler/oat_test.cc
index 3efb2dc..46aed60 100644
--- a/compiler/oat_test.cc
+++ b/compiler/oat_test.cc
@@ -175,7 +175,7 @@
   EXPECT_EQ(72U, sizeof(OatHeader));
   EXPECT_EQ(4U, sizeof(OatMethodOffsets));
   EXPECT_EQ(28U, sizeof(OatQuickMethodHeader));
-  EXPECT_EQ(92 * GetInstructionSetPointerSize(kRuntimeISA), sizeof(QuickEntryPoints));
+  EXPECT_EQ(91 * GetInstructionSetPointerSize(kRuntimeISA), sizeof(QuickEntryPoints));
 }
 
 TEST_F(OatTest, OatHeaderIsValid) {
diff --git a/compiler/optimizing/bounds_check_elimination.cc b/compiler/optimizing/bounds_check_elimination.cc
index 7444f6e..1d16794 100644
--- a/compiler/optimizing/bounds_check_elimination.cc
+++ b/compiler/optimizing/bounds_check_elimination.cc
@@ -443,31 +443,9 @@
 
 class BCEVisitor : public HGraphVisitor {
  public:
-  // The least number of bounds checks that should be eliminated by triggering
-  // the deoptimization technique.
-  static constexpr size_t kThresholdForAddingDeoptimize = 2;
-
-  // Very large constant index is considered as an anomaly. This is a threshold
-  // beyond which we don't bother to apply the deoptimization technique since
-  // it's likely some AIOOBE will be thrown.
-  static constexpr int32_t kMaxConstantForAddingDeoptimize = INT_MAX - 1024 * 1024;
-
   explicit BCEVisitor(HGraph* graph)
       : HGraphVisitor(graph),
-        maps_(graph->GetBlocks().Size()),
-        need_to_revisit_block_(false) {}
-
-  void VisitBasicBlock(HBasicBlock* block) OVERRIDE {
-    first_constant_index_bounds_check_map_.clear();
-    HGraphVisitor::VisitBasicBlock(block);
-    if (need_to_revisit_block_) {
-      AddComparesWithDeoptimization(block);
-      need_to_revisit_block_ = false;
-      first_constant_index_bounds_check_map_.clear();
-      GetValueRangeMap(block)->clear();
-      HGraphVisitor::VisitBasicBlock(block);
-    }
-  }
+        maps_(graph->GetBlocks().Size()) {}
 
  private:
   // Return the map of proven value ranges at the beginning of a basic block.
@@ -723,26 +701,9 @@
         }
       }
 
-      if (first_constant_index_bounds_check_map_.find(array_length->GetId()) ==
-          first_constant_index_bounds_check_map_.end()) {
-        // Remember the first bounds check against array_length of a constant index.
-        // That bounds check instruction has an associated HEnvironment where we
-        // may add an HDeoptimize to eliminate bounds checks of constant indices
-        // against array_length.
-        first_constant_index_bounds_check_map_.Put(array_length->GetId(), bounds_check);
-      } else {
-        // We've seen it at least twice. It's beneficial to introduce a compare with
-        // deoptimization fallback to eliminate the bounds checks.
-        need_to_revisit_block_ = true;
-      }
-
       // Once we have an array access like 'array[5] = 1', we record array.length >= 6.
       // We currently don't do it for non-constant index since a valid array[i] can't prove
       // a valid array[i-1] yet due to the lower bound side.
-      if (constant == INT_MAX) {
-        // INT_MAX as an index will definitely throw AIOOBE.
-        return;
-      }
       ValueBound lower = ValueBound(nullptr, constant + 1);
       ValueBound upper = ValueBound::Max();
       ValueRange* range = new (GetGraph()->GetArena())
@@ -977,90 +938,8 @@
     }
   }
 
-  void VisitDeoptimize(HDeoptimize* deoptimize) {
-    // Right now it's only HLessThanOrEqual.
-    DCHECK(deoptimize->InputAt(0)->IsLessThanOrEqual());
-    HLessThanOrEqual* less_than_or_equal = deoptimize->InputAt(0)->AsLessThanOrEqual();
-    HInstruction* instruction = less_than_or_equal->InputAt(0);
-    if (instruction->IsArrayLength()) {
-      HInstruction* constant = less_than_or_equal->InputAt(1);
-      DCHECK(constant->IsIntConstant());
-      DCHECK(constant->AsIntConstant()->GetValue() != INT_MAX);
-      ValueBound lower = ValueBound(nullptr, constant->AsIntConstant()->GetValue() + 1);
-      ValueRange* range = new (GetGraph()->GetArena())
-          ValueRange(GetGraph()->GetArena(), lower, ValueBound::Max());
-      GetValueRangeMap(deoptimize->GetBlock())->Overwrite(instruction->GetId(), range);
-    }
-  }
-
-  void AddCompareWithDeoptimization(HInstruction* array_length,
-                                    HIntConstant* const_instr,
-                                    HBasicBlock* block) {
-    DCHECK(array_length->IsArrayLength());
-    ValueRange* range = LookupValueRange(array_length, block);
-    ValueBound lower_bound = range->GetLower();
-    DCHECK(lower_bound.IsConstant());
-    DCHECK(const_instr->GetValue() <= kMaxConstantForAddingDeoptimize);
-    DCHECK_EQ(lower_bound.GetConstant(), const_instr->GetValue() + 1);
-
-    // If array_length is less than lower_const, deoptimize.
-    HBoundsCheck* bounds_check = first_constant_index_bounds_check_map_.Get(
-        array_length->GetId())->AsBoundsCheck();
-    HCondition* cond = new (GetGraph()->GetArena()) HLessThanOrEqual(array_length, const_instr);
-    HDeoptimize* deoptimize = new (GetGraph()->GetArena())
-        HDeoptimize(cond, bounds_check->GetDexPc());
-    block->InsertInstructionBefore(cond, bounds_check);
-    block->InsertInstructionBefore(deoptimize, bounds_check);
-    deoptimize->SetEnvironment(bounds_check->GetEnvironment());
-  }
-
-  void AddComparesWithDeoptimization(HBasicBlock* block) {
-    for (ArenaSafeMap<int, HBoundsCheck*>::iterator it =
-             first_constant_index_bounds_check_map_.begin();
-         it != first_constant_index_bounds_check_map_.end();
-         ++it) {
-      HBoundsCheck* bounds_check = it->second;
-      HArrayLength* array_length = bounds_check->InputAt(1)->AsArrayLength();
-      HIntConstant* lower_bound_const_instr = nullptr;
-      int32_t lower_bound_const = INT_MIN;
-      size_t counter = 0;
-      // Count the constant indexing for which bounds checks haven't
-      // been removed yet.
-      for (HUseIterator<HInstruction*> it2(array_length->GetUses());
-           !it2.Done();
-           it2.Advance()) {
-        HInstruction* user = it2.Current()->GetUser();
-        if (user->GetBlock() == block &&
-            user->IsBoundsCheck() &&
-            user->AsBoundsCheck()->InputAt(0)->IsIntConstant()) {
-          DCHECK_EQ(array_length, user->AsBoundsCheck()->InputAt(1));
-          HIntConstant* const_instr = user->AsBoundsCheck()->InputAt(0)->AsIntConstant();
-          if (const_instr->GetValue() > lower_bound_const) {
-            lower_bound_const = const_instr->GetValue();
-            lower_bound_const_instr = const_instr;
-          }
-          counter++;
-        }
-      }
-      if (counter >= kThresholdForAddingDeoptimize &&
-          lower_bound_const_instr->GetValue() <= kMaxConstantForAddingDeoptimize) {
-        AddCompareWithDeoptimization(array_length, lower_bound_const_instr, block);
-      }
-    }
-  }
-
   std::vector<std::unique_ptr<ArenaSafeMap<int, ValueRange*>>> maps_;
 
-  // Map an HArrayLength instruction's id to the first HBoundsCheck instruction in
-  // a block that checks a constant index against that HArrayLength.
-  SafeMap<int, HBoundsCheck*> first_constant_index_bounds_check_map_;
-
-  // For the block, there is at least one HArrayLength instruction for which there
-  // is more than one bounds check instruction with constant indexing. And it's
-  // beneficial to add a compare instruction that has deoptimization fallback and
-  // eliminate those bounds checks.
-  bool need_to_revisit_block_;
-
   DISALLOW_COPY_AND_ASSIGN(BCEVisitor);
 };
 
diff --git a/compiler/optimizing/bounds_check_elimination_test.cc b/compiler/optimizing/bounds_check_elimination_test.cc
index 5632f2b..24fa583 100644
--- a/compiler/optimizing/bounds_check_elimination_test.cc
+++ b/compiler/optimizing/bounds_check_elimination_test.cc
@@ -289,9 +289,9 @@
   ASSERT_FALSE(IsRemoved(bounds_check));
 }
 
-// array[6] = 1; // Can't eliminate.
-// array[5] = 1; // Can eliminate.
+// array[5] = 1; // Can't eliminate.
 // array[4] = 1; // Can eliminate.
+// array[6] = 1; // Can't eliminate.
 TEST(BoundsCheckEliminationTest, ConstantArrayBoundsElimination) {
   ArenaPool pool;
   ArenaAllocator allocator(&pool);
@@ -319,20 +319,9 @@
 
   HNullCheck* null_check = new (&allocator) HNullCheck(parameter, 0);
   HArrayLength* array_length = new (&allocator) HArrayLength(null_check);
-  HBoundsCheck* bounds_check6 = new (&allocator)
-      HBoundsCheck(constant_6, array_length, 0);
-  HInstruction* array_set = new (&allocator) HArraySet(
-    null_check, bounds_check6, constant_1, Primitive::kPrimInt, 0);
-  block->AddInstruction(null_check);
-  block->AddInstruction(array_length);
-  block->AddInstruction(bounds_check6);
-  block->AddInstruction(array_set);
-
-  null_check = new (&allocator) HNullCheck(parameter, 0);
-  array_length = new (&allocator) HArrayLength(null_check);
   HBoundsCheck* bounds_check5 = new (&allocator)
       HBoundsCheck(constant_5, array_length, 0);
-  array_set = new (&allocator) HArraySet(
+  HInstruction* array_set = new (&allocator) HArraySet(
     null_check, bounds_check5, constant_1, Primitive::kPrimInt, 0);
   block->AddInstruction(null_check);
   block->AddInstruction(array_length);
@@ -350,6 +339,17 @@
   block->AddInstruction(bounds_check4);
   block->AddInstruction(array_set);
 
+  null_check = new (&allocator) HNullCheck(parameter, 0);
+  array_length = new (&allocator) HArrayLength(null_check);
+  HBoundsCheck* bounds_check6 = new (&allocator)
+      HBoundsCheck(constant_6, array_length, 0);
+  array_set = new (&allocator) HArraySet(
+    null_check, bounds_check6, constant_1, Primitive::kPrimInt, 0);
+  block->AddInstruction(null_check);
+  block->AddInstruction(array_length);
+  block->AddInstruction(bounds_check6);
+  block->AddInstruction(array_set);
+
   block->AddInstruction(new (&allocator) HGoto());
 
   HBasicBlock* exit = new (&allocator) HBasicBlock(graph);
@@ -361,9 +361,9 @@
   RunSimplifierAndGvn(graph);
   BoundsCheckElimination bounds_check_elimination(graph);
   bounds_check_elimination.Run();
-  ASSERT_FALSE(IsRemoved(bounds_check6));
-  ASSERT_TRUE(IsRemoved(bounds_check5));
+  ASSERT_FALSE(IsRemoved(bounds_check5));
   ASSERT_TRUE(IsRemoved(bounds_check4));
+  ASSERT_FALSE(IsRemoved(bounds_check6));
 }
 
 // for (int i=initial; i<array.length; i+=increment) { array[i] = 10; }
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc
index 363fbef..0a069a7 100644
--- a/compiler/optimizing/code_generator_arm.cc
+++ b/compiler/optimizing/code_generator_arm.cc
@@ -305,26 +305,6 @@
   DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM);
 };
 
-class DeoptimizationSlowPathARM : public SlowPathCodeARM {
- public:
-  explicit DeoptimizationSlowPathARM(HInstruction* instruction)
-    : instruction_(instruction) {}
-
-  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
-    __ Bind(GetEntryLabel());
-    SaveLiveRegisters(codegen, instruction_->GetLocations());
-    DCHECK(instruction_->IsDeoptimize());
-    HDeoptimize* deoptimize = instruction_->AsDeoptimize();
-    uint32_t dex_pc = deoptimize->GetDexPc();
-    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
-    arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize), instruction_, dex_pc, this);
-  }
-
- private:
-  HInstruction* const instruction_;
-  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM);
-};
-
 #undef __
 
 #undef __
@@ -925,17 +905,24 @@
   UNUSED(exit);
 }
 
-void InstructionCodeGeneratorARM::GenerateTestAndBranch(HInstruction* instruction,
-                                                        Label* true_target,
-                                                        Label* false_target,
-                                                        Label* always_true_target) {
-  HInstruction* cond = instruction->InputAt(0);
+void LocationsBuilderARM::VisitIf(HIf* if_instr) {
+  LocationSummary* locations =
+      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
+  HInstruction* cond = if_instr->InputAt(0);
+  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
+    locations->SetInAt(0, Location::RequiresRegister());
+  }
+}
+
+void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
+  HInstruction* cond = if_instr->InputAt(0);
   if (cond->IsIntConstant()) {
     // Constant condition, statically compared against 1.
     int32_t cond_value = cond->AsIntConstant()->GetValue();
     if (cond_value == 1) {
-      if (always_true_target != nullptr) {
-        __ b(always_true_target);
+      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
+                                     if_instr->IfTrueSuccessor())) {
+        __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
       }
       return;
     } else {
@@ -944,10 +931,10 @@
   } else {
     if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
       // Condition has been materialized, compare the output to 0
-      DCHECK(instruction->GetLocations()->InAt(0).IsRegister());
-      __ cmp(instruction->GetLocations()->InAt(0).AsRegister<Register>(),
+      DCHECK(if_instr->GetLocations()->InAt(0).IsRegister());
+      __ cmp(if_instr->GetLocations()->InAt(0).AsRegister<Register>(),
              ShifterOperand(0));
-      __ b(true_target, NE);
+      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), NE);
     } else {
       // Condition has not been materialized, use its inputs as the
       // comparison and its condition as the branch condition.
@@ -969,55 +956,16 @@
           __ cmp(left, ShifterOperand(temp));
         }
       }
-      __ b(true_target);
+      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()),
+           ARMCondition(cond->AsCondition()->GetCondition()));
     }
   }
-  if (false_target != nullptr) {
-    __ b(false_target);
+  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
+                                 if_instr->IfFalseSuccessor())) {
+    __ b(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
   }
 }
 
-void LocationsBuilderARM::VisitIf(HIf* if_instr) {
-  LocationSummary* locations =
-      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
-  HInstruction* cond = if_instr->InputAt(0);
-  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
-    locations->SetInAt(0, Location::RequiresRegister());
-  }
-}
-
-void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
-  Label* true_target = codegen_->GetLabelOf(if_instr->IfTrueSuccessor());
-  Label* false_target = codegen_->GetLabelOf(if_instr->IfFalseSuccessor());
-  Label* always_true_target = true_target;
-  if (codegen_->GoesToNextBlock(if_instr->GetBlock(),
-                                if_instr->IfTrueSuccessor())) {
-    always_true_target = nullptr;
-  }
-  if (codegen_->GoesToNextBlock(if_instr->GetBlock(),
-                                if_instr->IfFalseSuccessor())) {
-    false_target = nullptr;
-  }
-  GenerateTestAndBranch(if_instr, true_target, false_target, always_true_target);
-}
-
-void LocationsBuilderARM::VisitDeoptimize(HDeoptimize* deoptimize) {
-  LocationSummary* locations = new (GetGraph()->GetArena())
-      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
-  HInstruction* cond = deoptimize->InputAt(0);
-  DCHECK(cond->IsCondition());
-  if (cond->AsCondition()->NeedsMaterialization()) {
-    locations->SetInAt(0, Location::RequiresRegister());
-  }
-}
-
-void InstructionCodeGeneratorARM::VisitDeoptimize(HDeoptimize* deoptimize) {
-  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena())
-      DeoptimizationSlowPathARM(deoptimize);
-  codegen_->AddSlowPath(slow_path);
-  Label* slow_path_entry = slow_path->GetEntryLabel();
-  GenerateTestAndBranch(deoptimize, slow_path_entry, nullptr, slow_path_entry);
-}
 
 void LocationsBuilderARM::VisitCondition(HCondition* comp) {
   LocationSummary* locations =
diff --git a/compiler/optimizing/code_generator_arm.h b/compiler/optimizing/code_generator_arm.h
index 946a8a5..57e1d2f 100644
--- a/compiler/optimizing/code_generator_arm.h
+++ b/compiler/optimizing/code_generator_arm.h
@@ -169,10 +169,6 @@
   void HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info);
   void GenerateImplicitNullCheck(HNullCheck* instruction);
   void GenerateExplicitNullCheck(HNullCheck* instruction);
-  void GenerateTestAndBranch(HInstruction* instruction,
-                             Label* true_target,
-                             Label* false_target,
-                             Label* always_true_target);
 
   ArmAssembler* const assembler_;
   CodeGeneratorARM* const codegen_;
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 44bd399..99283a0 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -375,26 +375,6 @@
   DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
 };
 
-class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
- public:
-  explicit DeoptimizationSlowPathARM64(HInstruction* instruction)
-    : instruction_(instruction) {}
-
-  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
-    __ Bind(GetEntryLabel());
-    SaveLiveRegisters(codegen, instruction_->GetLocations());
-    DCHECK(instruction_->IsDeoptimize());
-    HDeoptimize* deoptimize = instruction_->AsDeoptimize();
-    uint32_t dex_pc = deoptimize->GetDexPc();
-    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
-    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize), instruction_, dex_pc, this);
-  }
-
- private:
-  HInstruction* const instruction_;
-  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
-};
-
 #undef __
 
 Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
@@ -1654,18 +1634,25 @@
   }
 }
 
-void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
-                                                          vixl::Label* true_target,
-                                                          vixl::Label* false_target,
-                                                          vixl::Label* always_true_target) {
-  HInstruction* cond = instruction->InputAt(0);
+void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
+  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
+  HInstruction* cond = if_instr->InputAt(0);
+  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
+    locations->SetInAt(0, Location::RequiresRegister());
+  }
+}
+
+void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
+  HInstruction* cond = if_instr->InputAt(0);
   HCondition* condition = cond->AsCondition();
+  vixl::Label* true_target = codegen_->GetLabelOf(if_instr->IfTrueSuccessor());
+  vixl::Label* false_target = codegen_->GetLabelOf(if_instr->IfFalseSuccessor());
 
   if (cond->IsIntConstant()) {
     int32_t cond_value = cond->AsIntConstant()->GetValue();
     if (cond_value == 1) {
-      if (always_true_target != nullptr) {
-        __ B(always_true_target);
+      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfTrueSuccessor())) {
+        __ B(true_target);
       }
       return;
     } else {
@@ -1673,9 +1660,9 @@
     }
   } else if (!cond->IsCondition() || condition->NeedsMaterialization()) {
     // The condition instruction has been materialized, compare the output to 0.
-    Location cond_val = instruction->GetLocations()->InAt(0);
+    Location cond_val = if_instr->GetLocations()->InAt(0);
     DCHECK(cond_val.IsRegister());
-    __ Cbnz(InputRegisterAt(instruction, 0), true_target);
+    __ Cbnz(InputRegisterAt(if_instr, 0), true_target);
   } else {
     // The condition instruction has not been materialized, use its inputs as
     // the comparison and its condition as the branch condition.
@@ -1693,52 +1680,11 @@
       __ B(arm64_cond, true_target);
     }
   }
-  if (false_target != nullptr) {
+  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfFalseSuccessor())) {
     __ B(false_target);
   }
 }
 
-void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
-  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
-  HInstruction* cond = if_instr->InputAt(0);
-  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
-    locations->SetInAt(0, Location::RequiresRegister());
-  }
-}
-
-void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
-  vixl::Label* true_target = codegen_->GetLabelOf(if_instr->IfTrueSuccessor());
-  vixl::Label* false_target = codegen_->GetLabelOf(if_instr->IfFalseSuccessor());
-  vixl::Label* always_true_target = true_target;
-  if (codegen_->GoesToNextBlock(if_instr->GetBlock(),
-                                if_instr->IfTrueSuccessor())) {
-    always_true_target = nullptr;
-  }
-  if (codegen_->GoesToNextBlock(if_instr->GetBlock(),
-                                if_instr->IfFalseSuccessor())) {
-    false_target = nullptr;
-  }
-  GenerateTestAndBranch(if_instr, true_target, false_target, always_true_target);
-}
-
-void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
-  LocationSummary* locations = new (GetGraph()->GetArena())
-      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
-  HInstruction* cond = deoptimize->InputAt(0);
-  DCHECK(cond->IsCondition());
-  if (cond->AsCondition()->NeedsMaterialization()) {
-    locations->SetInAt(0, Location::RequiresRegister());
-  }
-}
-
-void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
-  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena())
-      DeoptimizationSlowPathARM64(deoptimize);
-  codegen_->AddSlowPath(slow_path);
-  vixl::Label* slow_path_entry = slow_path->GetEntryLabel();
-  GenerateTestAndBranch(deoptimize, slow_path_entry, nullptr, slow_path_entry);
-}
-
 void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
   LocationSummary* locations =
       new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h
index 860b250..cbb2e5c 100644
--- a/compiler/optimizing/code_generator_arm64.h
+++ b/compiler/optimizing/code_generator_arm64.h
@@ -140,10 +140,6 @@
   void HandleShift(HBinaryOperation* instr);
   void GenerateImplicitNullCheck(HNullCheck* instruction);
   void GenerateExplicitNullCheck(HNullCheck* instruction);
-  void GenerateTestAndBranch(HInstruction* instruction,
-                             vixl::Label* true_target,
-                             vixl::Label* false_target,
-                             vixl::Label* always_true_target);
 
   Arm64Assembler* const assembler_;
   CodeGeneratorARM64* const codegen_;
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 44bbccc..02b9b32 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -324,27 +324,6 @@
   DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86);
 };
 
-class DeoptimizationSlowPathX86 : public SlowPathCodeX86 {
- public:
-  explicit DeoptimizationSlowPathX86(HInstruction* instruction)
-    : instruction_(instruction) {}
-
-  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
-    __ Bind(GetEntryLabel());
-    SaveLiveRegisters(codegen, instruction_->GetLocations());
-    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pDeoptimize)));
-    // No need to restore live registers.
-    DCHECK(instruction_->IsDeoptimize());
-    HDeoptimize* deoptimize = instruction_->AsDeoptimize();
-    uint32_t dex_pc = deoptimize->GetDexPc();
-    codegen->RecordPcInfo(instruction_, dex_pc, this);
-  }
-
- private:
-  HInstruction* const instruction_;
-  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86);
-};
-
 #undef __
 #define __ reinterpret_cast<X86Assembler*>(GetAssembler())->
 
@@ -835,17 +814,24 @@
   UNUSED(exit);
 }
 
-void InstructionCodeGeneratorX86::GenerateTestAndBranch(HInstruction* instruction,
-                                                        Label* true_target,
-                                                        Label* false_target,
-                                                        Label* always_true_target) {
-  HInstruction* cond = instruction->InputAt(0);
+void LocationsBuilderX86::VisitIf(HIf* if_instr) {
+  LocationSummary* locations =
+      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
+  HInstruction* cond = if_instr->InputAt(0);
+  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
+    locations->SetInAt(0, Location::Any());
+  }
+}
+
+void InstructionCodeGeneratorX86::VisitIf(HIf* if_instr) {
+  HInstruction* cond = if_instr->InputAt(0);
   if (cond->IsIntConstant()) {
     // Constant condition, statically compared against 1.
     int32_t cond_value = cond->AsIntConstant()->GetValue();
     if (cond_value == 1) {
-      if (always_true_target != nullptr) {
-        __ jmp(always_true_target);
+      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
+                                     if_instr->IfTrueSuccessor())) {
+        __ jmp(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
       }
       return;
     } else {
@@ -858,19 +844,20 @@
     // evaluated just before the if, we don't need to evaluate it
     // again.
     bool eflags_set = cond->IsCondition()
-        && cond->AsCondition()->IsBeforeWhenDisregardMoves(instruction);
+        && cond->AsCondition()->IsBeforeWhenDisregardMoves(if_instr);
     if (materialized) {
       if (!eflags_set) {
         // Materialized condition, compare against 0.
-        Location lhs = instruction->GetLocations()->InAt(0);
+        Location lhs = if_instr->GetLocations()->InAt(0);
         if (lhs.IsRegister()) {
           __ testl(lhs.AsRegister<Register>(), lhs.AsRegister<Register>());
         } else {
           __ cmpl(Address(ESP, lhs.GetStackIndex()), Immediate(0));
         }
-        __ j(kNotEqual, true_target);
+        __ j(kNotEqual,  codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
       } else {
-        __ j(X86Condition(cond->AsCondition()->GetCondition()), true_target);
+        __ j(X86Condition(cond->AsCondition()->GetCondition()),
+             codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
       }
     } else {
       Location lhs = cond->GetLocations()->InAt(0);
@@ -889,56 +876,16 @@
       } else {
         __ cmpl(lhs.AsRegister<Register>(), Address(ESP, rhs.GetStackIndex()));
       }
-      __ j(X86Condition(cond->AsCondition()->GetCondition()), true_target);
+      __ j(X86Condition(cond->AsCondition()->GetCondition()),
+           codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
     }
   }
-  if (false_target != nullptr) {
-    __ jmp(false_target);
+  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
+                                 if_instr->IfFalseSuccessor())) {
+    __ jmp(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
   }
 }
 
-void LocationsBuilderX86::VisitIf(HIf* if_instr) {
-  LocationSummary* locations =
-      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
-  HInstruction* cond = if_instr->InputAt(0);
-  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
-    locations->SetInAt(0, Location::Any());
-  }
-}
-
-void InstructionCodeGeneratorX86::VisitIf(HIf* if_instr) {
-  Label* true_target = codegen_->GetLabelOf(if_instr->IfTrueSuccessor());
-  Label* false_target = codegen_->GetLabelOf(if_instr->IfFalseSuccessor());
-  Label* always_true_target = true_target;
-  if (codegen_->GoesToNextBlock(if_instr->GetBlock(),
-                                if_instr->IfTrueSuccessor())) {
-    always_true_target = nullptr;
-  }
-  if (codegen_->GoesToNextBlock(if_instr->GetBlock(),
-                                if_instr->IfFalseSuccessor())) {
-    false_target = nullptr;
-  }
-  GenerateTestAndBranch(if_instr, true_target, false_target, always_true_target);
-}
-
-void LocationsBuilderX86::VisitDeoptimize(HDeoptimize* deoptimize) {
-  LocationSummary* locations = new (GetGraph()->GetArena())
-      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
-  HInstruction* cond = deoptimize->InputAt(0);
-  DCHECK(cond->IsCondition());
-  if (cond->AsCondition()->NeedsMaterialization()) {
-    locations->SetInAt(0, Location::Any());
-  }
-}
-
-void InstructionCodeGeneratorX86::VisitDeoptimize(HDeoptimize* deoptimize) {
-  SlowPathCodeX86* slow_path = new (GetGraph()->GetArena())
-      DeoptimizationSlowPathX86(deoptimize);
-  codegen_->AddSlowPath(slow_path);
-  Label* slow_path_entry = slow_path->GetEntryLabel();
-  GenerateTestAndBranch(deoptimize, slow_path_entry, nullptr, slow_path_entry);
-}
-
 void LocationsBuilderX86::VisitLocal(HLocal* local) {
   local->SetLocations(nullptr);
 }
diff --git a/compiler/optimizing/code_generator_x86.h b/compiler/optimizing/code_generator_x86.h
index 7c7365f..c5763de 100644
--- a/compiler/optimizing/code_generator_x86.h
+++ b/compiler/optimizing/code_generator_x86.h
@@ -157,10 +157,6 @@
 
   void GenerateImplicitNullCheck(HNullCheck* instruction);
   void GenerateExplicitNullCheck(HNullCheck* instruction);
-  void GenerateTestAndBranch(HInstruction* instruction,
-                             Label* true_target,
-                             Label* false_target,
-                             Label* always_true_target);
 
   X86Assembler* const assembler_;
   CodeGeneratorX86* const codegen_;
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index d8b9450..d09c8f8 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -331,27 +331,6 @@
   DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
 };
 
-class DeoptimizationSlowPathX86_64 : public SlowPathCodeX86_64 {
- public:
-  explicit DeoptimizationSlowPathX86_64(HInstruction* instruction)
-      : instruction_(instruction) {}
-
-  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
-    __ Bind(GetEntryLabel());
-    SaveLiveRegisters(codegen, instruction_->GetLocations());
-    __ gs()->call(
-        Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pDeoptimize), true));
-    DCHECK(instruction_->IsDeoptimize());
-    HDeoptimize* deoptimize = instruction_->AsDeoptimize();
-    uint32_t dex_pc = deoptimize->GetDexPc();
-    codegen->RecordPcInfo(instruction_, dex_pc, this);
-  }
-
- private:
-  HInstruction* const instruction_;
-  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
-};
-
 #undef __
 #define __ reinterpret_cast<X86_64Assembler*>(GetAssembler())->
 
@@ -772,17 +751,24 @@
   UNUSED(exit);
 }
 
-void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
-                                                           Label* true_target,
-                                                           Label* false_target,
-                                                           Label* always_true_target) {
-  HInstruction* cond = instruction->InputAt(0);
+void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
+  LocationSummary* locations =
+      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
+  HInstruction* cond = if_instr->InputAt(0);
+  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
+    locations->SetInAt(0, Location::Any());
+  }
+}
+
+void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
+  HInstruction* cond = if_instr->InputAt(0);
   if (cond->IsIntConstant()) {
     // Constant condition, statically compared against 1.
     int32_t cond_value = cond->AsIntConstant()->GetValue();
     if (cond_value == 1) {
-      if (always_true_target != nullptr) {
-        __ jmp(always_true_target);
+      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
+                                     if_instr->IfTrueSuccessor())) {
+        __ jmp(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
       }
       return;
     } else {
@@ -795,20 +781,21 @@
     // evaluated just before the if, we don't need to evaluate it
     // again.
     bool eflags_set = cond->IsCondition()
-        && cond->AsCondition()->IsBeforeWhenDisregardMoves(instruction);
+        && cond->AsCondition()->IsBeforeWhenDisregardMoves(if_instr);
     if (materialized) {
       if (!eflags_set) {
         // Materialized condition, compare against 0.
-        Location lhs = instruction->GetLocations()->InAt(0);
+        Location lhs = if_instr->GetLocations()->InAt(0);
         if (lhs.IsRegister()) {
           __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
         } else {
           __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()),
                   Immediate(0));
         }
-        __ j(kNotEqual, true_target);
+        __ j(kNotEqual, codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
       } else {
-        __ j(X86_64Condition(cond->AsCondition()->GetCondition()), true_target);
+        __ j(X86_64Condition(cond->AsCondition()->GetCondition()),
+             codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
       }
     } else {
       Location lhs = cond->GetLocations()->InAt(0);
@@ -826,56 +813,16 @@
         __ cmpl(lhs.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), rhs.GetStackIndex()));
       }
-      __ j(X86_64Condition(cond->AsCondition()->GetCondition()), true_target);
+      __ j(X86_64Condition(cond->AsCondition()->GetCondition()),
+           codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
     }
   }
-  if (false_target != nullptr) {
-    __ jmp(false_target);
+  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
+                                 if_instr->IfFalseSuccessor())) {
+    __ jmp(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
   }
 }
 
-void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
-  LocationSummary* locations =
-      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
-  HInstruction* cond = if_instr->InputAt(0);
-  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
-    locations->SetInAt(0, Location::Any());
-  }
-}
-
-void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
-  Label* true_target = codegen_->GetLabelOf(if_instr->IfTrueSuccessor());
-  Label* false_target = codegen_->GetLabelOf(if_instr->IfFalseSuccessor());
-  Label* always_true_target = true_target;
-  if (codegen_->GoesToNextBlock(if_instr->GetBlock(),
-                                if_instr->IfTrueSuccessor())) {
-    always_true_target = nullptr;
-  }
-  if (codegen_->GoesToNextBlock(if_instr->GetBlock(),
-                                if_instr->IfFalseSuccessor())) {
-    false_target = nullptr;
-  }
-  GenerateTestAndBranch(if_instr, true_target, false_target, always_true_target);
-}
-
-void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
-  LocationSummary* locations = new (GetGraph()->GetArena())
-      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
-  HInstruction* cond = deoptimize->InputAt(0);
-  DCHECK(cond->IsCondition());
-  if (cond->AsCondition()->NeedsMaterialization()) {
-    locations->SetInAt(0, Location::Any());
-  }
-}
-
-void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
-  SlowPathCodeX86_64* slow_path = new (GetGraph()->GetArena())
-      DeoptimizationSlowPathX86_64(deoptimize);
-  codegen_->AddSlowPath(slow_path);
-  Label* slow_path_entry = slow_path->GetEntryLabel();
-  GenerateTestAndBranch(deoptimize, slow_path_entry, nullptr, slow_path_entry);
-}
-
 void LocationsBuilderX86_64::VisitLocal(HLocal* local) {
   local->SetLocations(nullptr);
 }
diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h
index 0683952..707c999 100644
--- a/compiler/optimizing/code_generator_x86_64.h
+++ b/compiler/optimizing/code_generator_x86_64.h
@@ -163,10 +163,6 @@
   void GenerateExplicitNullCheck(HNullCheck* instruction);
   void PushOntoFPStack(Location source, uint32_t temp_offset,
                        uint32_t stack_adjustment, bool is_float);
-  void GenerateTestAndBranch(HInstruction* instruction,
-                             Label* true_target,
-                             Label* false_target,
-                             Label* always_true_target);
 
   X86_64Assembler* const assembler_;
   CodeGeneratorX86_64* const codegen_;
diff --git a/compiler/optimizing/nodes.cc b/compiler/optimizing/nodes.cc
index aeb1751..a90ebce 100644
--- a/compiler/optimizing/nodes.cc
+++ b/compiler/optimizing/nodes.cc
@@ -696,8 +696,8 @@
   }
 }
 
-bool HCondition::IsBeforeWhenDisregardMoves(HInstruction* instruction) const {
-  return this == instruction->GetPreviousDisregardingMoves();
+bool HCondition::IsBeforeWhenDisregardMoves(HIf* if_) const {
+  return this == if_->GetPreviousDisregardingMoves();
 }
 
 HConstant* HConstant::NewConstant(ArenaAllocator* allocator, Primitive::Type type, int64_t val) {
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index b10231e..07ff8ba 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -616,7 +616,6 @@
   M(ClinitCheck, Instruction)                                           \
   M(Compare, BinaryOperation)                                           \
   M(Condition, BinaryOperation)                                         \
-  M(Deoptimize, Instruction)                                            \
   M(Div, BinaryOperation)                                               \
   M(DivZeroCheck, Instruction)                                          \
   M(DoubleConstant, Constant)                                           \
@@ -1479,31 +1478,12 @@
 
   DECLARE_INSTRUCTION(If);
 
+  virtual bool IsIfInstruction() const { return true; }
+
  private:
   DISALLOW_COPY_AND_ASSIGN(HIf);
 };
 
-// Deoptimize to interpreter, upon checking a condition.
-class HDeoptimize : public HTemplateInstruction<1> {
- public:
-  HDeoptimize(HInstruction* cond, uint32_t dex_pc)
-      : HTemplateInstruction(SideEffects::None()),
-        dex_pc_(dex_pc) {
-    SetRawInputAt(0, cond);
-  }
-
-  bool NeedsEnvironment() const OVERRIDE { return true; }
-  bool CanThrow() const OVERRIDE { return true; }
-  uint32_t GetDexPc() const { return dex_pc_; }
-
-  DECLARE_INSTRUCTION(Deoptimize);
-
- private:
-  uint32_t dex_pc_;
-
-  DISALLOW_COPY_AND_ASSIGN(HDeoptimize);
-};
-
 class HUnaryOperation : public HExpression<1> {
  public:
   HUnaryOperation(Primitive::Type result_type, HInstruction* input)
@@ -1621,8 +1601,8 @@
   void ClearNeedsMaterialization() { needs_materialization_ = false; }
 
   // For code generation purposes, returns whether this instruction is just before
-  // `instruction`, and disregard moves in between.
-  bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;
+  // `if_`, and disregard moves in between.
+  bool IsBeforeWhenDisregardMoves(HIf* if_) const;
 
   DECLARE_INSTRUCTION(Condition);
 
diff --git a/compiler/optimizing/prepare_for_register_allocation.cc b/compiler/optimizing/prepare_for_register_allocation.cc
index 01faa9d..2d9a2bf 100644
--- a/compiler/optimizing/prepare_for_register_allocation.cc
+++ b/compiler/optimizing/prepare_for_register_allocation.cc
@@ -64,7 +64,7 @@
     needs_materialization = true;
   } else {
     HInstruction* user = condition->GetUses().GetFirst()->GetUser();
-    if (!user->IsIf() && !user->IsDeoptimize()) {
+    if (!user->IsIf()) {
       needs_materialization = true;
     } else {
       // TODO: if there is no intervening instructions with side-effect between this condition
diff --git a/runtime/arch/arm/entrypoints_init_arm.cc b/runtime/arch/arm/entrypoints_init_arm.cc
index 055b5ab..ce0e614 100644
--- a/runtime/arch/arm/entrypoints_init_arm.cc
+++ b/runtime/arch/arm/entrypoints_init_arm.cc
@@ -159,8 +159,6 @@
   qpoints->pThrowNoSuchMethod = art_quick_throw_no_such_method;
   qpoints->pThrowNullPointer = art_quick_throw_null_pointer_exception;
   qpoints->pThrowStackOverflow = art_quick_throw_stack_overflow;
-
-  qpoints->pDeoptimize = art_quick_deoptimize;
 }
 
 }  // namespace art
diff --git a/runtime/arch/arm64/entrypoints_init_arm64.cc b/runtime/arch/arm64/entrypoints_init_arm64.cc
index 6c787e3..e68d41d 100644
--- a/runtime/arch/arm64/entrypoints_init_arm64.cc
+++ b/runtime/arch/arm64/entrypoints_init_arm64.cc
@@ -151,9 +151,6 @@
   qpoints->pThrowNoSuchMethod = art_quick_throw_no_such_method;
   qpoints->pThrowNullPointer = art_quick_throw_null_pointer_exception;
   qpoints->pThrowStackOverflow = art_quick_throw_stack_overflow;
-
-  // Deoptimize
-  qpoints->pDeoptimize = art_quick_deoptimize;
 };
 
 }  // namespace art
diff --git a/runtime/arch/x86/entrypoints_init_x86.cc b/runtime/arch/x86/entrypoints_init_x86.cc
index c012173..7cdd2fc 100644
--- a/runtime/arch/x86/entrypoints_init_x86.cc
+++ b/runtime/arch/x86/entrypoints_init_x86.cc
@@ -141,9 +141,6 @@
   qpoints->pThrowNoSuchMethod = art_quick_throw_no_such_method;
   qpoints->pThrowNullPointer = art_quick_throw_null_pointer_exception;
   qpoints->pThrowStackOverflow = art_quick_throw_stack_overflow;
-
-  // Deoptimize
-  qpoints->pDeoptimize = art_quick_deoptimize_from_compiled_slow_path;
 };
 
 }  // namespace art
diff --git a/runtime/arch/x86/quick_entrypoints_x86.S b/runtime/arch/x86/quick_entrypoints_x86.S
index c5a020a..e59c881 100644
--- a/runtime/arch/x86/quick_entrypoints_x86.S
+++ b/runtime/arch/x86/quick_entrypoints_x86.S
@@ -1542,10 +1542,7 @@
      * will long jump to the upcall with a special exception of -1.
      */
 DEFINE_FUNCTION art_quick_deoptimize
-    pushl %ebx                    // Entry point for a jump. Fake that we were called.
-.globl SYMBOL(art_quick_deoptimize_from_compiled_slow_path)  // Entry point for real calls
-                                                             // from compiled slow paths.
-SYMBOL(art_quick_deoptimize_from_compiled_slow_path):
+    pushl %ebx                    // Fake that we were called.
     SETUP_SAVE_ALL_CALLEE_SAVE_FRAME ebx, ebx
     subl LITERAL(12), %esp        // Align stack.
     CFI_ADJUST_CFA_OFFSET(12)
diff --git a/runtime/arch/x86_64/entrypoints_init_x86_64.cc b/runtime/arch/x86_64/entrypoints_init_x86_64.cc
index 3bc0dc4..b25d7a7 100644
--- a/runtime/arch/x86_64/entrypoints_init_x86_64.cc
+++ b/runtime/arch/x86_64/entrypoints_init_x86_64.cc
@@ -146,9 +146,6 @@
   qpoints->pThrowNoSuchMethod = art_quick_throw_no_such_method;
   qpoints->pThrowNullPointer = art_quick_throw_null_pointer_exception;
   qpoints->pThrowStackOverflow = art_quick_throw_stack_overflow;
-
-  // Deoptimize
-  qpoints->pDeoptimize = art_quick_deoptimize_from_compiled_slow_path;
 #endif  // __APPLE__
 };
 
diff --git a/runtime/arch/x86_64/quick_entrypoints_x86_64.S b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
index 3a448a5..5edcd96 100644
--- a/runtime/arch/x86_64/quick_entrypoints_x86_64.S
+++ b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
@@ -1600,11 +1600,7 @@
      * will long jump to the upcall with a special exception of -1.
      */
 DEFINE_FUNCTION art_quick_deoptimize
-    pushq %rsi                     // Entry point for a jump. Fake that we were called.
-                                   // Use hidden arg.
-.globl SYMBOL(art_quick_deoptimize_from_compiled_slow_path)  // Entry point for real calls
-                                                             // from compiled slow paths.
-SYMBOL(art_quick_deoptimize_from_compiled_slow_path):
+    pushq %rsi                     // Fake that we were called. Use hidden arg.
     SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
                                    // Stack should be aligned now.
     movq %gs:THREAD_SELF_OFFSET, %rdi         // Pass Thread.
@@ -1612,6 +1608,7 @@
     int3                           // Unreachable.
 END_FUNCTION art_quick_deoptimize
 
+
     /*
      * String's compareTo.
      *
diff --git a/runtime/asm_support.h b/runtime/asm_support.h
index 0d0017d..b1dbf6f 100644
--- a/runtime/asm_support.h
+++ b/runtime/asm_support.h
@@ -103,7 +103,7 @@
 ADD_TEST_EQ(THREAD_SELF_OFFSET,
             art::Thread::SelfOffset<__SIZEOF_POINTER__>().Int32Value())
 
-#define THREAD_LOCAL_POS_OFFSET (THREAD_CARD_TABLE_OFFSET + 126 * __SIZEOF_POINTER__)
+#define THREAD_LOCAL_POS_OFFSET (THREAD_CARD_TABLE_OFFSET + 125 * __SIZEOF_POINTER__)
 ADD_TEST_EQ(THREAD_LOCAL_POS_OFFSET,
             art::Thread::ThreadLocalPosOffset<__SIZEOF_POINTER__>().Int32Value())
 #define THREAD_LOCAL_END_OFFSET (THREAD_LOCAL_POS_OFFSET + __SIZEOF_POINTER__)
diff --git a/runtime/entrypoints/quick/quick_entrypoints_list.h b/runtime/entrypoints/quick/quick_entrypoints_list.h
index eaf874e..da454f3 100644
--- a/runtime/entrypoints/quick/quick_entrypoints_list.h
+++ b/runtime/entrypoints/quick/quick_entrypoints_list.h
@@ -120,7 +120,6 @@
   V(ThrowNoSuchMethod, void, int32_t) \
   V(ThrowNullPointer, void, void) \
   V(ThrowStackOverflow, void, void*) \
-  V(Deoptimize, void, void) \
 \
   V(A64Load, int64_t, volatile const int64_t *) \
   V(A64Store, void, volatile int64_t *, int64_t)
diff --git a/runtime/entrypoints/runtime_asm_entrypoints.h b/runtime/entrypoints/runtime_asm_entrypoints.h
index bfe7ee8..420e8db 100644
--- a/runtime/entrypoints/runtime_asm_entrypoints.h
+++ b/runtime/entrypoints/runtime_asm_entrypoints.h
@@ -70,8 +70,6 @@
   return reinterpret_cast<const void*>(art_quick_instrumentation_entry);
 }
 
-extern "C" void art_quick_deoptimize_from_compiled_slow_path();
-
 // The return_pc of instrumentation exit stub.
 extern "C" void art_quick_instrumentation_exit();
 static inline const void* GetQuickInstrumentationExitPc() {
diff --git a/runtime/entrypoints_order_test.cc b/runtime/entrypoints_order_test.cc
index 0664fa0..88209a3 100644
--- a/runtime/entrypoints_order_test.cc
+++ b/runtime/entrypoints_order_test.cc
@@ -265,8 +265,7 @@
     EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pThrowDivZero, pThrowNoSuchMethod, sizeof(void*));
     EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pThrowNoSuchMethod, pThrowNullPointer, sizeof(void*));
     EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pThrowNullPointer, pThrowStackOverflow, sizeof(void*));
-    EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pThrowStackOverflow, pDeoptimize, sizeof(void*));
-    EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pDeoptimize, pA64Load, sizeof(void*));
+    EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pThrowStackOverflow, pA64Load, sizeof(void*));
     EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pA64Load, pA64Store, sizeof(void*));
 
     CHECKED(OFFSETOF_MEMBER(QuickEntryPoints, pA64Store)
diff --git a/runtime/interpreter/interpreter.cc b/runtime/interpreter/interpreter.cc
index 4801124..686b518 100644
--- a/runtime/interpreter/interpreter.cc
+++ b/runtime/interpreter/interpreter.cc
@@ -401,9 +401,7 @@
                                   // or DexFile::kDexNoIndex if there is none.
     } else {
       const Instruction* instr = Instruction::At(&code_item->insns_[dex_pc]);
-      // For an invoke, use the dex pc of the next instruction.
-      // TODO: should be tested more once b/17586779 is fixed.
-      new_dex_pc = dex_pc + (instr->IsInvoke() ? instr->SizeInCodeUnits() : 0);
+      new_dex_pc = dex_pc + instr->SizeInCodeUnits();  // the dex pc of the next instruction.
     }
     if (new_dex_pc != DexFile::kDexNoIndex) {
       shadow_frame->SetDexPC(new_dex_pc);
diff --git a/runtime/thread.cc b/runtime/thread.cc
index 9fee779..e1a07e9 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -1996,7 +1996,6 @@
   QUICK_ENTRY_POINT_INFO(pThrowNoSuchMethod)
   QUICK_ENTRY_POINT_INFO(pThrowNullPointer)
   QUICK_ENTRY_POINT_INFO(pThrowStackOverflow)
-  QUICK_ENTRY_POINT_INFO(pDeoptimize)
   QUICK_ENTRY_POINT_INFO(pA64Load)
   QUICK_ENTRY_POINT_INFO(pA64Store)
 #undef QUICK_ENTRY_POINT_INFO
diff --git a/test/449-checker-bce/expected.txt b/test/449-checker-bce/expected.txt
index 29d6383..e69de29 100644
--- a/test/449-checker-bce/expected.txt
+++ b/test/449-checker-bce/expected.txt
@@ -1 +0,0 @@
-100
diff --git a/test/449-checker-bce/src/Main.java b/test/449-checker-bce/src/Main.java
index 17039a3..30aa870 100644
--- a/test/449-checker-bce/src/Main.java
+++ b/test/449-checker-bce/src/Main.java
@@ -108,147 +108,29 @@
   }
 
 
-  // CHECK-START: void Main.constantIndexing1(int[]) BCE (before)
+  // CHECK-START: void Main.constantIndexing(int[]) BCE (before)
+  // CHECK: BoundsCheck
+  // CHECK: ArraySet
   // CHECK: BoundsCheck
   // CHECK: ArraySet
   // CHECK: BoundsCheck
   // CHECK: ArraySet
 
-  // CHECK-START: void Main.constantIndexing1(int[]) BCE (after)
-  // CHECK-NOT: Deoptimize
+  // CHECK-START: void Main.constantIndexing(int[]) BCE (after)
   // CHECK: BoundsCheck
   // CHECK: ArraySet
   // CHECK-NOT: BoundsCheck
   // CHECK: ArraySet
+  // CHECK: BoundsCheck
+  // CHECK: ArraySet
 
-  static void constantIndexing1(int[] array) {
+  static void constantIndexing(int[] array) {
     array[5] = 1;
     array[4] = 1;
+    array[6] = 1;
   }
 
 
-  // CHECK-START: void Main.constantIndexing2(int[]) BCE (before)
-  // CHECK: BoundsCheck
-  // CHECK: ArraySet
-  // CHECK: BoundsCheck
-  // CHECK: ArraySet
-  // CHECK: BoundsCheck
-  // CHECK: ArraySet
-  // CHECK: BoundsCheck
-  // CHECK: ArraySet
-
-  // CHECK-START: void Main.constantIndexing2(int[]) BCE (after)
-  // CHECK: LessThanOrEqual
-  // CHECK: Deoptimize
-  // CHECK-NOT: BoundsCheck
-  // CHECK: ArraySet
-  // CHECK-NOT: BoundsCheck
-  // CHECK: ArraySet
-  // CHECK-NOT: BoundsCheck
-  // CHECK: ArraySet
-  // CHECK-NOT: BoundsCheck
-  // CHECK: ArraySet
-  // CHECK: BoundsCheck
-  // CHECK: ArraySet
-
-  static void constantIndexing2(int[] array) {
-    array[1] = 1;
-    array[2] = 1;
-    array[3] = 1;
-    array[4] = 1;
-    array[-1] = 1;
-  }
-
-
-  // CHECK-START: int[] Main.constantIndexing3(int[], int[], boolean) BCE (before)
-  // CHECK: BoundsCheck
-  // CHECK: ArrayGet
-  // CHECK: BoundsCheck
-  // CHECK: ArraySet
-  // CHECK: BoundsCheck
-  // CHECK: ArrayGet
-  // CHECK: BoundsCheck
-  // CHECK: ArraySet
-  // CHECK: BoundsCheck
-  // CHECK: ArrayGet
-  // CHECK: BoundsCheck
-  // CHECK: ArraySet
-  // CHECK: BoundsCheck
-  // CHECK: ArrayGet
-  // CHECK: BoundsCheck
-  // CHECK: ArraySet
-
-  // CHECK-START: int[] Main.constantIndexing3(int[], int[], boolean) BCE (after)
-  // CHECK: LessThanOrEqual
-  // CHECK: Deoptimize
-  // CHECK-NOT: BoundsCheck
-  // CHECK: ArrayGet
-  // CHECK: LessThanOrEqual
-  // CHECK: Deoptimize
-  // CHECK-NOT: BoundsCheck
-  // CHECK: ArraySet
-  // CHECK-NOT: BoundsCheck
-  // CHECK: ArrayGet
-  // CHECK-NOT: BoundsCheck
-  // CHECK: ArraySet
-  // CHECK-NOT: BoundsCheck
-  // CHECK: ArrayGet
-  // CHECK-NOT: BoundsCheck
-  // CHECK: ArraySet
-  // CHECK-NOT: BoundsCheck
-  // CHECK: ArrayGet
-  // CHECK-NOT: BoundsCheck
-  // CHECK: ArraySet
-
-  static int[] constantIndexing3(int[] array1, int[] array2, boolean copy) {
-    if (!copy) {
-      return array1;
-    }
-    array2[0] = array1[0];
-    array2[1] = array1[1];
-    array2[2] = array1[2];
-    array2[3] = array1[3];
-    return array2;
-  }
-
-
-  // CHECK-START: void Main.constantIndexing4(int[]) BCE (before)
-  // CHECK: BoundsCheck
-  // CHECK: ArraySet
-
-  // CHECK-START: void Main.constantIndexing4(int[]) BCE (after)
-  // CHECK-NOT: LessThanOrEqual
-  // CHECK: BoundsCheck
-  // CHECK: ArraySet
-
-  // There is only one array access. It's not beneficial
-  // to create a compare with deoptimization instruction.
-  static void constantIndexing4(int[] array) {
-    array[0] = 1;
-  }
-
-
-  // CHECK-START: void Main.constantIndexing5(int[]) BCE (before)
-  // CHECK: BoundsCheck
-  // CHECK: ArraySet
-  // CHECK: BoundsCheck
-  // CHECK: ArraySet
-
-  // CHECK-START: void Main.constantIndexing5(int[]) BCE (after)
-  // CHECK-NOT: Deoptimize
-  // CHECK: BoundsCheck
-  // CHECK: ArraySet
-  // CHECK: BoundsCheck
-  // CHECK: ArraySet
-
-  static void constantIndexing5(int[] array) {
-    // We don't apply the deoptimization for very large constant index
-    // since it's likely to be an anomaly and will throw AIOOBE.
-    array[Integer.MAX_VALUE - 1000] = 1;
-    array[Integer.MAX_VALUE - 999] = 1;
-    array[Integer.MAX_VALUE - 998] = 1;
-  }
-
   // CHECK-START: void Main.loopPattern1(int[]) BCE (before)
   // CHECK: BoundsCheck
   // CHECK: ArraySet
@@ -597,21 +479,6 @@
   }
 
 
-  static int foo() {
-    try {
-      // This will cause AIOOBE.
-      constantIndexing2(new int[3]);
-    } catch (ArrayIndexOutOfBoundsException e) {
-      return 99;
-    }
-    return 0;
-  }
-
-
-  // Make sure this method is compiled with optimizing.
-  // CHECK-START: void Main.main(java.lang.String[]) register (after)
-  // CHECK: ParallelMove
-
   public static void main(String[] args) {
     sieve(20);
 
@@ -640,10 +507,5 @@
     if (!isPyramid(array)) {
       System.out.println("pyramid3 failed!");
     }
-
-    // Make sure this value is kept after deoptimization.
-    int i = 1;
-    System.out.println(foo() + i);
   }
-
 }