Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/graph_visualizer.cc                      |   2
-rw-r--r--  compiler/optimizing/nodes_x86.h                              |   2
-rw-r--r--  compiler/optimizing/prepare_for_register_allocation.cc       | 108
-rw-r--r--  compiler/optimizing/prepare_for_register_allocation_test.cc  | 312
4 files changed, 410 insertions(+), 14 deletions(-)
diff --git a/compiler/optimizing/graph_visualizer.cc b/compiler/optimizing/graph_visualizer.cc
index a0ec99ffc3..a8d487e51a 100644
--- a/compiler/optimizing/graph_visualizer.cc
+++ b/compiler/optimizing/graph_visualizer.cc
@@ -512,6 +512,8 @@ class HGraphVisualizerPrinter final : public HGraphDelegateVisitor {
void VisitCondition(HCondition* condition) override {
StartAttributeStream("bias") << condition->GetBias();
+ StartAttributeStream("emitted_at_use_site")
+ << std::boolalpha << condition->IsEmittedAtUseSite() << std::noboolalpha;
}
void VisitIf(HIf* if_instr) override {
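
The two lines added to VisitCondition() use the standard std::boolalpha/std::noboolalpha stream manipulators, so the new attribute is dumped as true/false rather than 1/0. A minimal standalone illustration of that manipulator pair (plain C++, not part of the patch):

#include <iostream>

int main() {
  bool emitted_at_use_site = true;
  // Prints "true"; without std::boolalpha this would print "1".
  std::cout << std::boolalpha << emitted_at_use_site << std::noboolalpha << '\n';
  return 0;
}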
diff --git a/compiler/optimizing/nodes_x86.h b/compiler/optimizing/nodes_x86.h
index 71c4f7aeeb..491045de99 100644
--- a/compiler/optimizing/nodes_x86.h
+++ b/compiler/optimizing/nodes_x86.h
@@ -59,6 +59,8 @@ class HX86LoadFromConstantTable final : public HExpression<2> {
return InputAt(1)->AsConstant();
}
+ bool CanBeMoved() const override { return true; }
+
DECLARE_INSTRUCTION(X86LoadFromConstantTable);
protected:
diff --git a/compiler/optimizing/prepare_for_register_allocation.cc b/compiler/optimizing/prepare_for_register_allocation.cc
index 1eb340a9b4..c5dbab5f79 100644
--- a/compiler/optimizing/prepare_for_register_allocation.cc
+++ b/compiler/optimizing/prepare_for_register_allocation.cc
@@ -42,7 +42,8 @@ class PrepareForRegisterAllocationVisitor final : public HGraphDelegateVisitor {
void VisitBoundType(HBoundType* bound_type) override;
void VisitArraySet(HArraySet* instruction) override;
void VisitClinitCheck(HClinitCheck* check) override;
- void VisitCondition(HCondition* condition) override;
+ void VisitIf(HIf* if_instr) override;
+ void VisitSelect(HSelect* select) override;
void VisitConstructorFence(HConstructorFence* constructor_fence) override;
void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) override;
void VisitDeoptimize(HDeoptimize* deoptimize) override;
@@ -50,6 +51,7 @@ class PrepareForRegisterAllocationVisitor final : public HGraphDelegateVisitor {
bool CanMoveClinitCheck(HInstruction* input, HInstruction* user) const;
bool CanEmitConditionAt(HCondition* condition, HInstruction* user) const;
+ void TryToMoveConditionToUser(HInstruction* maybe_condition, HInstruction* user);
const CompilerOptions& compiler_options_;
};
@@ -108,6 +110,7 @@ void PrepareForRegisterAllocationVisitor::VisitDeoptimize(HDeoptimize* deoptimiz
deoptimize->ReplaceWith(deoptimize->GuardedInput());
deoptimize->RemoveGuard();
}
+ TryToMoveConditionToUser(deoptimize->InputAt(0), deoptimize);
}
void PrepareForRegisterAllocationVisitor::VisitBoundsCheck(HBoundsCheck* check) {
@@ -206,37 +209,114 @@ void PrepareForRegisterAllocationVisitor::VisitClinitCheck(HClinitCheck* check)
}
}
-bool PrepareForRegisterAllocationVisitor::CanEmitConditionAt(HCondition* condition,
- HInstruction* user) const {
- if (condition->GetNext() != user) {
+// Determine if moving `condition` to `user` would observably extend the lifetime of a reference.
+// By "observably" we understand that the reference would need to be visible to the GC for longer.
+// We're not concerned with the lifetime for the purposes of register allocation here.
+static bool ConditionMoveWouldExtendReferenceLifetime(HCondition* condition, HInstruction* user) {
+ HInstruction* lhs = condition->InputAt(0);
+ if (lhs->GetType() != DataType::Type::kReference) {
+ return false;
+ }
+ HInstruction* rhs = condition->InputAt(1);
+ DCHECK_EQ(rhs->GetType(), DataType::Type::kReference);
+ if (lhs->IsNullConstant() && rhs->IsNullConstant()) {
+ return false;
+ }
+ // Check if the last instruction with an environment before `user` already has all of the
+ // `condition`'s non-null inputs in that environment. If so, we would not be extending the lifetime.
+ HInstruction* instruction_with_env = user->GetPrevious();
+ while (instruction_with_env != nullptr &&
+ instruction_with_env != condition &&
+ instruction_with_env->GetEnvironment() == nullptr) {
+ DCHECK(!instruction_with_env->GetSideEffects().Includes(SideEffects::CanTriggerGC()));
+ instruction_with_env = instruction_with_env->GetPrevious();
+ }
+ if (instruction_with_env == nullptr) {
+ // No instruction with an environment found in the `user`'s block before `user`. Do not search
+ // other blocks; conservatively assume that moving the `condition` to the `user` would extend the lifetime of a reference.
+ return true;
+ }
+ if (instruction_with_env == condition) {
+ // There is no instruction with an environment between `condition` and `user`, so moving
+ // the condition before the user will not observably extend the lifetime of the reference.
return false;
}
+ DCHECK(instruction_with_env->HasEnvironment());
+ auto env_inputs = instruction_with_env->GetEnvironment()->GetEnvInputs();
+ auto extends_lifetime = [&](HInstruction* instruction) {
+ return !instruction->IsNullConstant() &&
+ std::find(env_inputs.begin(), env_inputs.end(), instruction) == env_inputs.end();
+ };
+ return extends_lifetime(lhs) || extends_lifetime(rhs);
+}
+
+bool PrepareForRegisterAllocationVisitor::CanEmitConditionAt(HCondition* condition,
+ HInstruction* user) const {
+ DCHECK(user->IsIf() || user->IsDeoptimize() || user->IsSelect());
if (GetGraph()->IsCompilingBaseline() && compiler_options_.ProfileBranches()) {
// To do branch profiling, we cannot emit conditions at use site.
return false;
}
- if (user->IsIf() || user->IsDeoptimize()) {
- return true;
+ // Move only a single-user `HCondition` to the `user`.
+ if (!condition->HasOnlyOneNonEnvironmentUse()) {
+ return false;
}
+ DCHECK(condition->GetUses().front().GetUser() == user);
- if (user->IsSelect() && user->AsSelect()->GetCondition() == condition) {
- return true;
+ if (condition->GetNext() != user) {
+ // Avoid moving across blocks if the graph has any irreducible loops.
+ if (condition->GetBlock() != user->GetBlock() && GetGraph()->HasIrreducibleLoops()) {
+ return false;
+ }
+ // Avoid extending the lifetime of references by moving the condition.
+ if (ConditionMoveWouldExtendReferenceLifetime(condition, user)) {
+ return false;
+ }
}
- return false;
+ return true;
}
-void PrepareForRegisterAllocationVisitor::VisitCondition(HCondition* condition) {
- if (condition->HasOnlyOneNonEnvironmentUse()) {
- HInstruction* user = condition->GetUses().front().GetUser();
- if (CanEmitConditionAt(condition, user)) {
- condition->MarkEmittedAtUseSite();
+void PrepareForRegisterAllocationVisitor::TryToMoveConditionToUser(HInstruction* maybe_condition,
+ HInstruction* user) {
+ DCHECK(user->IsIf() || user->IsDeoptimize() || user->IsSelect());
+ if (maybe_condition->IsCondition() && CanEmitConditionAt(maybe_condition->AsCondition(), user)) {
+ if (maybe_condition->GetNext() != user) {
+ maybe_condition->MoveBefore(user);
+#ifdef ART_ENABLE_CODEGEN_x86
+ for (HInstruction* input : maybe_condition->GetInputs()) {
+ if (input->IsEmittedAtUseSite()) {
+ DCHECK(input->IsX86LoadFromConstantTable());
+ input->MoveBefore(maybe_condition);
+ HInstruction* inputs_input = input->InputAt(0);
+ DCHECK(inputs_input->IsX86ComputeBaseMethodAddress());
+ if (inputs_input->HasOnlyOneNonEnvironmentUse()) {
+ inputs_input->MoveBefore(input);
+ }
+ }
+ }
+#else // ART_ENABLE_CODEGEN_x86
+ if (kIsDebugBuild) {
+ for (HInstruction* input : maybe_condition->GetInputs()) {
+ CHECK(!input->IsEmittedAtUseSite()) << input->DebugName() << "#" << input->GetId();
+ }
+ }
+#endif
}
+ maybe_condition->MarkEmittedAtUseSite();
}
}
+void PrepareForRegisterAllocationVisitor::VisitIf(HIf* if_instr) {
+ TryToMoveConditionToUser(if_instr->InputAt(0), if_instr);
+}
+
+void PrepareForRegisterAllocationVisitor::VisitSelect(HSelect* select) {
+ TryToMoveConditionToUser(select->GetCondition(), select);
+}
+
void PrepareForRegisterAllocationVisitor::VisitConstructorFence(
HConstructorFence* constructor_fence) {
// Trivially remove redundant HConstructorFence when it immediately follows an HNewInstance
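
For orientation, here is a minimal, self-contained sketch of the reference-lifetime check introduced above, written against hypothetical stand-in types (Instr with a previous link and an optional environment list) rather than the real HInstruction/HEnvironment API. It mirrors the backward walk from `user` to the nearest instruction with an environment and the test that both condition inputs are already covered by that environment; it is not part of the patch.

#include <algorithm>
#include <vector>

// Hypothetical stand-in for HInstruction: `env`, when non-null, models an HEnvironment,
// i.e. a safepoint whose inputs stay visible to the GC.
struct Instr {
  bool is_reference = false;
  bool is_null_constant = false;
  Instr* previous = nullptr;                 // Previous instruction in the same block.
  const std::vector<Instr*>* env = nullptr;  // Environment inputs, if this is a safepoint.
};

// Would moving `condition` (inputs `lhs`, `rhs`) down to `user` keep a reference visible
// to the GC for longer? Mirrors ConditionMoveWouldExtendReferenceLifetime() in the hunk above.
bool MoveWouldExtendReferenceLifetime(const Instr* condition,
                                      const Instr* lhs,
                                      const Instr* rhs,
                                      const Instr* user) {
  if (!lhs->is_reference) {
    return false;  // Only reference comparisons are interesting for the GC.
  }
  if (lhs->is_null_constant && rhs->is_null_constant) {
    return false;
  }
  // Walk back from `user` to the last instruction with an environment in the same block.
  const Instr* safepoint = user->previous;
  while (safepoint != nullptr && safepoint != condition && safepoint->env == nullptr) {
    safepoint = safepoint->previous;
  }
  if (safepoint == nullptr) {
    return true;  // Reached the block start; be conservative about other blocks.
  }
  if (safepoint == condition) {
    return false;  // No safepoint between `condition` and `user`; nothing changes for the GC.
  }
  auto extends_lifetime = [&](const Instr* input) {
    return !input->is_null_constant &&
           std::find(safepoint->env->begin(), safepoint->env->end(), input) ==
               safepoint->env->end();
  };
  return extends_lifetime(lhs) || extends_lifetime(rhs);
}

When the check passes and the condition is not already adjacent to its user, TryToMoveConditionToUser() calls MoveBefore() and, on x86, also drags along any HX86LoadFromConstantTable inputs (and, when single-use, their HX86ComputeBaseMethodAddress), which is why nodes_x86.h now declares that load as movable.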
diff --git a/compiler/optimizing/prepare_for_register_allocation_test.cc b/compiler/optimizing/prepare_for_register_allocation_test.cc
new file mode 100644
index 0000000000..a5bbae19a2
--- /dev/null
+++ b/compiler/optimizing/prepare_for_register_allocation_test.cc
@@ -0,0 +1,312 @@
+/*
+ * Copyright (C) 2025 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "prepare_for_register_allocation.h"
+
+#include <gtest/gtest.h>
+
+#include "base/macros.h"
+#include "optimizing_unit_test.h"
+
+namespace art HIDDEN {
+
+class PrepareForRegisterAllocationTest
+ : public CommonCompilerTest, public OptimizingUnitTestHelper {
+ protected:
+ void RunPass() {
+ graph_->BuildDominatorTree();
+ PrepareForRegisterAllocation(graph_, *compiler_options_).Run();
+ }
+};
+
+TEST_F(PrepareForRegisterAllocationTest, MergeConditionToSelect) {
+ HBasicBlock* ret = InitEntryMainExitGraphWithReturnVoid();
+
+ HInstruction* param = MakeParam(DataType::Type::kInt32);
+ HInstruction* zero_const = graph_->GetIntConstant(0);
+ HCondition* condition = MakeCondition(ret, kCondLT, param, zero_const);
+ HSelect* select = MakeSelect(ret, condition, zero_const, param);
+
+ RunPass();
+
+ ASSERT_TRUE(condition->IsEmittedAtUseSite());
+ ASSERT_EQ(condition->GetNext(), select);
+}
+
+TEST_F(PrepareForRegisterAllocationTest, MergeConditionToDeoptimize) {
+ HBasicBlock* ret = InitEntryMainExitGraphWithReturnVoid();
+
+ HInstruction* param = MakeParam(DataType::Type::kInt32);
+ HInstruction* zero_const = graph_->GetIntConstant(0);
+ HCondition* condition = MakeCondition(ret, kCondLT, param, zero_const);
+ HDeoptimize* deopt = new (GetAllocator()) HDeoptimize(
+ GetAllocator(), condition, DeoptimizationKind::kAotInlineCache, /*dex_pc=*/ 0u);
+ AddOrInsertInstruction(ret, deopt);
+
+ RunPass();
+
+ ASSERT_TRUE(condition->IsEmittedAtUseSite());
+ ASSERT_EQ(condition->GetNext(), deopt);
+}
+
+TEST_F(PrepareForRegisterAllocationTest, MergeConditionToIf) {
+ HBasicBlock* ret = InitEntryMainExitGraphWithReturnVoid();
+ auto [start, left, right] = CreateDiamondPattern(ret);
+
+ HInstruction* param = MakeParam(DataType::Type::kInt32);
+ HInstruction* zero_const = graph_->GetIntConstant(0);
+ HCondition* condition = MakeCondition(start, kCondLT, param, zero_const);
+ HIf* start_if = MakeIf(start, condition);
+
+ RunPass();
+
+ ASSERT_TRUE(condition->IsEmittedAtUseSite());
+ ASSERT_EQ(condition->GetNext(), start_if);
+}
+
+TEST_F(PrepareForRegisterAllocationTest, MergeConditionToIfWithMove) {
+ HBasicBlock* ret = InitEntryMainExitGraphWithReturnVoid();
+ auto [start, left, right] = CreateDiamondPattern(ret);
+
+ HInstruction* param = MakeParam(DataType::Type::kInt32);
+ HInstruction* zero_const = graph_->GetIntConstant(0);
+ HCondition* condition = MakeCondition(start, kCondLT, param, zero_const);
+ HInstruction* add = MakeBinOp<HAdd>(start, DataType::Type::kInt32, param, param);
+ HIf* start_if = MakeIf(start, condition);
+
+ ASSERT_EQ(condition->GetNext(), add);
+ ASSERT_EQ(add->GetNext(), start_if);
+
+ RunPass();
+
+ ASSERT_TRUE(condition->IsEmittedAtUseSite());
+ ASSERT_EQ(add->GetNext(), condition);
+ ASSERT_EQ(condition->GetNext(), start_if);
+}
+
+TEST_F(PrepareForRegisterAllocationTest, MergeConditionToIfWithMoveFromPredecessor) {
+ HBasicBlock* ret = InitEntryMainExitGraphWithReturnVoid();
+ auto [start, left, right_end] = CreateDiamondPattern(ret);
+ auto [right_start, right_left, right_right] = CreateDiamondPattern(right_end);
+
+ HInstruction* cond_param = MakeParam(DataType::Type::kBool);
+ HInstruction* param = MakeParam(DataType::Type::kInt32);
+ HInstruction* zero_const = graph_->GetIntConstant(0);
+ HCondition* condition = MakeCondition(start, kCondLT, param, zero_const);
+ MakeIf(start, cond_param);
+ // Note: The condition for this `HIf` is in the predecessor block.
+ HIf* right_start_if = MakeIf(right_start, condition);
+
+ ASSERT_NE(condition->GetBlock(), right_start_if->GetBlock());
+
+ RunPass();
+
+ ASSERT_TRUE(condition->IsEmittedAtUseSite());
+ ASSERT_EQ(condition->GetBlock(), right_start_if->GetBlock());
+ ASSERT_EQ(condition->GetNext(), right_start_if);
+}
+
+TEST_F(PrepareForRegisterAllocationTest, MergeConditionPreventedByOtherUse) {
+ HBasicBlock* ret = InitEntryMainExitGraphWithReturnVoid();
+ auto [start, left, right] = CreateDiamondPattern(ret);
+
+ HInstruction* param = MakeParam(DataType::Type::kInt32);
+ HInstruction* zero_const = graph_->GetIntConstant(0);
+ HCondition* condition = MakeCondition(start, kCondLT, param, zero_const);
+ HIf* start_if = MakeIf(start, condition);
+
+ // Other use.
+ MakeBinOp<HAdd>(ret, DataType::Type::kInt32, param, condition);
+
+ RunPass();
+
+ ASSERT_FALSE(condition->IsEmittedAtUseSite());
+ ASSERT_EQ(condition->GetNext(), start_if);
+}
+
+TEST_F(PrepareForRegisterAllocationTest, MergeConditionPreventedByEnvUse) {
+ HBasicBlock* ret = InitEntryMainExitGraphWithReturnVoid();
+ auto [start, left, right] = CreateDiamondPattern(ret);
+
+ HInstruction* param = MakeParam(DataType::Type::kInt32);
+ HInstruction* zero_const = graph_->GetIntConstant(0);
+ HCondition* condition = MakeCondition(start, kCondLT, param, zero_const);
+ HIf* start_if = MakeIf(start, condition);
+
+ // Environment use.
+ MakeInvokeStatic(ret, DataType::Type::kVoid, /*args=*/ {}, /*env=*/ {condition});
+
+ RunPass();
+
+ ASSERT_FALSE(condition->IsEmittedAtUseSite());
+ ASSERT_EQ(condition->GetNext(), start_if);
+}
+
+TEST_F(PrepareForRegisterAllocationTest, MergeConditionPrevented_RefNoEnvInBlock) {
+ ScopedObjectAccess soa(Thread::Current());
+ VariableSizedHandleScope vshs(soa.Self());
+ HBasicBlock* ret = InitEntryMainExitGraphWithReturnVoid(&vshs);
+ auto [start, left, right_end] = CreateDiamondPattern(ret);
+ auto [right_start, right_left, right_right] = CreateDiamondPattern(right_end);
+
+ HInstruction* cond_param = MakeParam(DataType::Type::kBool);
+ HInstruction* param = MakeParam(DataType::Type::kReference);
+ HInstruction* null_const = graph_->GetNullConstant();
+ HCondition* condition = MakeCondition(start, kCondEQ, param, null_const);
+ MakeIf(start, cond_param);
+ // Note: The condition for this `HIf` is in the predecessor block.
+ HIf* right_start_if = MakeIf(right_start, condition);
+
+ RunPass();
+
+ ASSERT_FALSE(condition->IsEmittedAtUseSite());
+ ASSERT_NE(condition->GetBlock(), right_start_if->GetBlock()); // Not moved to the `HIf`.
+}
+
+TEST_F(PrepareForRegisterAllocationTest, MergeCondition_RefsInEnv) {
+ ScopedObjectAccess soa(Thread::Current());
+ VariableSizedHandleScope vshs(soa.Self());
+ HBasicBlock* ret = InitEntryMainExitGraphWithReturnVoid(&vshs);
+ auto [start, left, right_end] = CreateDiamondPattern(ret);
+
+ HInstruction* param1 = MakeParam(DataType::Type::kReference);
+ HInstruction* param2 = MakeParam(DataType::Type::kReference);
+ HCondition* condition = MakeCondition(start, kCondEQ, param1, param2);
+
+ // This invoke's environment already contains `param1` and `param2`, so reordering
+ // the `condition` after the invoke would not extend their lifetime for the purpose of GC.
+ HInvoke* invoke =
+ MakeInvokeStatic(start, DataType::Type::kVoid, /*args=*/ {}, /*env=*/ {param1, param2});
+
+ HIf* start_if = MakeIf(start, condition);
+
+ ASSERT_EQ(condition->GetNext(), invoke);
+ ASSERT_EQ(invoke->GetNext(), start_if);
+
+ RunPass();
+
+ ASSERT_TRUE(condition->IsEmittedAtUseSite());
+ ASSERT_EQ(invoke->GetNext(), condition);
+ ASSERT_EQ(condition->GetNext(), start_if);
+}
+
+TEST_F(PrepareForRegisterAllocationTest, MergeCondition_RefLhsInEnv) {
+ ScopedObjectAccess soa(Thread::Current());
+ VariableSizedHandleScope vshs(soa.Self());
+ HBasicBlock* ret = InitEntryMainExitGraphWithReturnVoid(&vshs);
+ auto [start, left, right_end] = CreateDiamondPattern(ret);
+
+ HInstruction* param = MakeParam(DataType::Type::kReference);
+ HInstruction* null_const = graph_->GetNullConstant();
+ HCondition* condition = MakeCondition(start, kCondEQ, param, null_const);
+
+ // This invoke's environment already contains `param`, so reordering the `condition`
+ // after the invoke would not extend its lifetime for the purpose of GC.
+ HInvoke* invoke = MakeInvokeStatic(start, DataType::Type::kVoid, /*args=*/ {}, /*env=*/ {param});
+
+ HIf* start_if = MakeIf(start, condition);
+
+ ASSERT_EQ(condition->GetNext(), invoke);
+ ASSERT_EQ(invoke->GetNext(), start_if);
+
+ RunPass();
+
+ ASSERT_TRUE(condition->IsEmittedAtUseSite());
+ ASSERT_EQ(invoke->GetNext(), condition);
+ ASSERT_EQ(condition->GetNext(), start_if);
+}
+
+TEST_F(PrepareForRegisterAllocationTest, MergeCondition_RefRhsInEnv) {
+ ScopedObjectAccess soa(Thread::Current());
+ VariableSizedHandleScope vshs(soa.Self());
+ HBasicBlock* ret = InitEntryMainExitGraphWithReturnVoid(&vshs);
+ auto [start, left, right_end] = CreateDiamondPattern(ret);
+
+ HInstruction* param = MakeParam(DataType::Type::kReference);
+ HInstruction* null_const = graph_->GetNullConstant();
+ HCondition* condition = MakeCondition(start, kCondEQ, null_const, param);
+
+ // This invoke's environment already contains `param`, so reordering the `condition`
+ // after the invoke would not extend its lifetime for the purpose of GC.
+ HInvoke* invoke = MakeInvokeStatic(start, DataType::Type::kVoid, /*args=*/ {}, /*env=*/ {param});
+
+ HIf* start_if = MakeIf(start, condition);
+
+ ASSERT_EQ(condition->GetNext(), invoke);
+ ASSERT_EQ(invoke->GetNext(), start_if);
+
+ RunPass();
+
+ ASSERT_TRUE(condition->IsEmittedAtUseSite());
+ ASSERT_EQ(invoke->GetNext(), condition);
+ ASSERT_EQ(condition->GetNext(), start_if);
+}
+
+TEST_F(PrepareForRegisterAllocationTest, MergeConditionPrevented_RefLhsNotInEnv) {
+ ScopedObjectAccess soa(Thread::Current());
+ VariableSizedHandleScope vshs(soa.Self());
+ HBasicBlock* ret = InitEntryMainExitGraphWithReturnVoid(&vshs);
+ auto [start, left, right_end] = CreateDiamondPattern(ret);
+
+ HInstruction* param1 = MakeParam(DataType::Type::kReference);
+ HInstruction* param2 = MakeParam(DataType::Type::kReference);
+ HCondition* condition = MakeCondition(start, kCondEQ, param1, param2);
+
+ // This invoke's environment does not contain `param1`, so reordering the `condition`
+ // after the invoke would need to extend the lifetime of `param1` for the purpose of GC.
+ // We do not want to extend the lifetime of references, so the optimization is skipped.
+ HInvoke* invoke = MakeInvokeStatic(start, DataType::Type::kVoid, /*args=*/ {}, /*env=*/ {param2});
+
+ HIf* start_if = MakeIf(start, condition);
+
+ ASSERT_EQ(condition->GetNext(), invoke);
+ ASSERT_EQ(invoke->GetNext(), start_if);
+
+ RunPass();
+
+ ASSERT_FALSE(condition->IsEmittedAtUseSite());
+ ASSERT_EQ(condition->GetNext(), invoke);
+ ASSERT_EQ(invoke->GetNext(), start_if);
+}
+
+TEST_F(PrepareForRegisterAllocationTest, MergeConditionPrevented_RefRhsNotInEnv) {
+ ScopedObjectAccess soa(Thread::Current());
+ VariableSizedHandleScope vshs(soa.Self());
+ HBasicBlock* ret = InitEntryMainExitGraphWithReturnVoid(&vshs);
+ auto [start, left, right_end] = CreateDiamondPattern(ret);
+
+ HInstruction* param1 = MakeParam(DataType::Type::kReference);
+ HInstruction* param2 = MakeParam(DataType::Type::kReference);
+ HCondition* condition = MakeCondition(start, kCondEQ, param1, param2);
+
+ // This invoke's environment does not contain `param2`, so reordering the `condition`
+ // after the invoke would need to extend the lifetime of `param2` for the purpose of GC.
+ // We do not want to extend the lifetime of references, so the optimization is skipped.
+ HInvoke* invoke = MakeInvokeStatic(start, DataType::Type::kVoid, /*args=*/ {}, /*env=*/ {param1});
+
+ HIf* start_if = MakeIf(start, condition);
+
+ ASSERT_EQ(condition->GetNext(), invoke);
+ ASSERT_EQ(invoke->GetNext(), start_if);
+
+ RunPass();
+
+ ASSERT_FALSE(condition->IsEmittedAtUseSite());
+ ASSERT_EQ(condition->GetNext(), invoke);
+ ASSERT_EQ(invoke->GetNext(), start_if);
+}
+
+} // namespace art
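
As a source-level analogue (hypothetical shapes, not taken from the patch), the HIR built by the first few tests corresponds to a comparison whose only non-environment use is the select or branch it feeds; that single-use property is what lets the pass mark the condition as emitted at its use site:

// Illustrative only: roughly the source patterns behind MergeConditionToSelect and
// MergeConditionToIf. In both, `param < 0` has a single use.
int SelectLikePattern(int param, int a, int b) {
  return (param < 0) ? a : b;  // Models an HCondition feeding an HSelect.
}

int IfLikePattern(int param) {
  if (param < 0) {             // Models an HCondition feeding an HIf.
    return 0;
  }
  return param;
}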