Move MethodLoadKind out of HInvokeStaticOrDirect.

This prepares for reusing the enum in HInvokeInterface. For consistency, also
move CodePtrLocation. Both enums now live at namespace scope in nodes.h, so
call sites drop the HInvokeStaticOrDirect:: qualifier.
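
A representative call-site change, as a sketch (the same pattern repeats
across the code generators and the sharpening pass):

  // Before:
  if (invoke->GetCodePtrLocation() ==
          HInvokeStaticOrDirect::CodePtrLocation::kCallCriticalNative) { ... }

  // After:
  if (invoke->GetCodePtrLocation() == CodePtrLocation::kCallCriticalNative) { ... }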

Test: test.py
Change-Id: I84f973764275002e2adb71080ebc833b2bafb975
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index 4f1ab75..9ff1f73 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -504,19 +504,19 @@
 
   if (invoke->IsInvokeStaticOrDirect()) {
     HInvokeStaticOrDirect* call = invoke->AsInvokeStaticOrDirect();
-    HInvokeStaticOrDirect::MethodLoadKind method_load_kind = call->GetMethodLoadKind();
-    HInvokeStaticOrDirect::CodePtrLocation code_ptr_location = call->GetCodePtrLocation();
-    if (code_ptr_location == HInvokeStaticOrDirect::CodePtrLocation::kCallCriticalNative) {
+    MethodLoadKind method_load_kind = call->GetMethodLoadKind();
+    CodePtrLocation code_ptr_location = call->GetCodePtrLocation();
+    if (code_ptr_location == CodePtrLocation::kCallCriticalNative) {
       locations->AddTemp(Location::RequiresRegister());  // For target method.
     }
-    if (code_ptr_location == HInvokeStaticOrDirect::CodePtrLocation::kCallCriticalNative ||
-        method_load_kind == HInvokeStaticOrDirect::MethodLoadKind::kRecursive) {
+    if (code_ptr_location == CodePtrLocation::kCallCriticalNative ||
+        method_load_kind == MethodLoadKind::kRecursive) {
       // For `kCallCriticalNative` we need the current method as the hidden argument
       // if we reach the dlsym lookup stub for @CriticalNative.
       locations->SetInAt(call->GetCurrentMethodIndex(), visitor->GetMethodLocation());
     } else {
       locations->AddTemp(visitor->GetMethodLocation());
-      if (method_load_kind == HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall) {
+      if (method_load_kind == MethodLoadKind::kRuntimeCall) {
         locations->SetInAt(call->GetCurrentMethodIndex(), Location::RequiresRegister());
       }
     }
@@ -924,7 +924,7 @@
 }
 
 uint32_t CodeGenerator::GetBootImageOffset(HInvokeStaticOrDirect* invoke) {
-  DCHECK_EQ(invoke->GetMethodLoadKind(), HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo);
+  DCHECK_EQ(invoke->GetMethodLoadKind(), MethodLoadKind::kBootImageRelRo);
   ArtMethod* method = invoke->GetResolvedMethod();
   DCHECK(method != nullptr);
   return GetBootImageOffsetImpl(method, ImageHeader::kSectionArtMethods);
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 46c65af..3ff83b4 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -4454,7 +4454,7 @@
     return;
   }
 
-  if (invoke->GetCodePtrLocation() == HInvokeStaticOrDirect::CodePtrLocation::kCallCriticalNative) {
+  if (invoke->GetCodePtrLocation() == CodePtrLocation::kCallCriticalNative) {
     CriticalNativeCallingConventionVisitorARM64 calling_convention_visitor(
         /*for_register_allocation=*/ true);
     CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
@@ -4484,17 +4484,17 @@
   // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
   Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
   switch (invoke->GetMethodLoadKind()) {
-    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
+    case MethodLoadKind::kStringInit: {
       uint32_t offset =
           GetThreadOffset<kArm64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
       // temp = thread->string_init_entrypoint
       __ Ldr(XRegisterFrom(temp), MemOperand(tr, offset));
       break;
     }
-    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
+    case MethodLoadKind::kRecursive:
       callee_method = invoke->GetLocations()->InAt(invoke->GetCurrentMethodIndex());
       break;
-    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
+    case MethodLoadKind::kBootImageLinkTimePcRelative: {
       DCHECK(GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension());
       // Add ADRP with its PC-relative method patch.
       vixl::aarch64::Label* adrp_label =
@@ -4506,7 +4506,7 @@
       EmitAddPlaceholder(add_label, XRegisterFrom(temp), XRegisterFrom(temp));
       break;
     }
-    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo: {
+    case MethodLoadKind::kBootImageRelRo: {
       // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
       uint32_t boot_image_offset = GetBootImageOffset(invoke);
       vixl::aarch64::Label* adrp_label = NewBootImageRelRoPatch(boot_image_offset);
@@ -4517,7 +4517,7 @@
       EmitLdrOffsetPlaceholder(ldr_label, WRegisterFrom(temp), XRegisterFrom(temp));
       break;
     }
-    case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
+    case MethodLoadKind::kBssEntry: {
       // Add ADRP with its PC-relative .bss entry patch.
       vixl::aarch64::Label* adrp_label = NewMethodBssEntryPatch(invoke->GetMethodReference());
       EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
@@ -4528,11 +4528,11 @@
       EmitLdrOffsetPlaceholder(ldr_label, XRegisterFrom(temp), XRegisterFrom(temp));
       break;
     }
-    case HInvokeStaticOrDirect::MethodLoadKind::kJitDirectAddress:
+    case MethodLoadKind::kJitDirectAddress:
       // Load method address from literal pool.
       __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
       break;
-    case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
+    case MethodLoadKind::kRuntimeCall: {
       GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
       return;  // No code pointer retrieval; the runtime performs the call directly.
     }
@@ -4552,7 +4552,7 @@
     }
   };
   switch (invoke->GetCodePtrLocation()) {
-    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
+    case CodePtrLocation::kCallSelf:
       {
         // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
         ExactAssemblyScope eas(GetVIXLAssembler(),
@@ -4562,7 +4562,7 @@
         RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
       }
       break;
-    case HInvokeStaticOrDirect::CodePtrLocation::kCallCriticalNative: {
+    case CodePtrLocation::kCallCriticalNative: {
       size_t out_frame_size =
           PrepareCriticalNativeCall<CriticalNativeCallingConventionVisitorARM64,
                                     kAapcs64StackAlignment,
@@ -4597,7 +4597,7 @@
       }
       break;
     }
-    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
+    case CodePtrLocation::kCallArtMethod:
       call_code_pointer_member(ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize));
       break;
   }
diff --git a/compiler/optimizing/code_generator_arm_vixl.cc b/compiler/optimizing/code_generator_arm_vixl.cc
index 88551e0..9b1f5ab 100644
--- a/compiler/optimizing/code_generator_arm_vixl.cc
+++ b/compiler/optimizing/code_generator_arm_vixl.cc
@@ -3361,7 +3361,7 @@
     return;
   }
 
-  if (invoke->GetCodePtrLocation() == HInvokeStaticOrDirect::CodePtrLocation::kCallCriticalNative) {
+  if (invoke->GetCodePtrLocation() == CodePtrLocation::kCallCriticalNative) {
     CriticalNativeCallingConventionVisitorARMVIXL calling_convention_visitor(
         /*for_register_allocation=*/ true);
     CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
@@ -9034,8 +9034,7 @@
     const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
     ArtMethod* method) {
   if (method->IsIntrinsic() &&
-      desired_dispatch_info.code_ptr_location ==
-          HInvokeStaticOrDirect::CodePtrLocation::kCallCriticalNative) {
+      desired_dispatch_info.code_ptr_location == CodePtrLocation::kCallCriticalNative) {
     // As a work-around for soft-float native ABI interfering with type checks, we are
     // inserting fake calls to Float.floatToRawIntBits() or Double.doubleToRawLongBits()
     // when a float or double argument is passed in core registers but we cannot do that
@@ -9048,7 +9047,7 @@
     for (uint32_t i = 1; i != shorty_len; ++i) {
       if (shorty[i] == 'D' || shorty[i] == 'F') {
         HInvokeStaticOrDirect::DispatchInfo dispatch_info = desired_dispatch_info;
-        dispatch_info.code_ptr_location = HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod;
+        dispatch_info.code_ptr_location = CodePtrLocation::kCallArtMethod;
         return dispatch_info;
       }
     }
@@ -9060,24 +9059,24 @@
     HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
   Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
   switch (invoke->GetMethodLoadKind()) {
-    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
+    case MethodLoadKind::kStringInit: {
       uint32_t offset =
           GetThreadOffset<kArmPointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
       // temp = thread->string_init_entrypoint
       GetAssembler()->LoadFromOffset(kLoadWord, RegisterFrom(temp), tr, offset);
       break;
     }
-    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
+    case MethodLoadKind::kRecursive:
       callee_method = invoke->GetLocations()->InAt(invoke->GetCurrentMethodIndex());
       break;
-    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
+    case MethodLoadKind::kBootImageLinkTimePcRelative: {
       DCHECK(GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension());
       PcRelativePatchInfo* labels = NewBootImageMethodPatch(invoke->GetResolvedMethodReference());
       vixl32::Register temp_reg = RegisterFrom(temp);
       EmitMovwMovtPlaceholder(labels, temp_reg);
       break;
     }
-    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo: {
+    case MethodLoadKind::kBootImageRelRo: {
       uint32_t boot_image_offset = GetBootImageOffset(invoke);
       PcRelativePatchInfo* labels = NewBootImageRelRoPatch(boot_image_offset);
       vixl32::Register temp_reg = RegisterFrom(temp);
@@ -9085,7 +9084,7 @@
       GetAssembler()->LoadFromOffset(kLoadWord, temp_reg, temp_reg, /* offset*/ 0);
       break;
     }
-    case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
+    case MethodLoadKind::kBssEntry: {
       PcRelativePatchInfo* labels = NewMethodBssEntryPatch(invoke->GetMethodReference());
       vixl32::Register temp_reg = RegisterFrom(temp);
       EmitMovwMovtPlaceholder(labels, temp_reg);
@@ -9093,10 +9092,10 @@
       GetAssembler()->LoadFromOffset(kLoadWord, temp_reg, temp_reg, /* offset*/ 0);
       break;
     }
-    case HInvokeStaticOrDirect::MethodLoadKind::kJitDirectAddress:
+    case MethodLoadKind::kJitDirectAddress:
       __ Mov(RegisterFrom(temp), Operand::From(invoke->GetMethodAddress()));
       break;
-    case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
+    case MethodLoadKind::kRuntimeCall: {
       GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
       return;  // No code pointer retrieval; the runtime performs the call directly.
     }
@@ -9117,7 +9116,7 @@
     }
   };
   switch (invoke->GetCodePtrLocation()) {
-    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
+    case CodePtrLocation::kCallSelf:
       {
         // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
         ExactAssemblyScope aas(GetVIXLAssembler(),
@@ -9127,7 +9126,7 @@
         RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
       }
       break;
-    case HInvokeStaticOrDirect::CodePtrLocation::kCallCriticalNative: {
+    case CodePtrLocation::kCallCriticalNative: {
       size_t out_frame_size =
           PrepareCriticalNativeCall<CriticalNativeCallingConventionVisitorARMVIXL,
                                     kAapcsStackAlignment,
@@ -9158,7 +9157,7 @@
       }
       break;
     }
-    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
+    case CodePtrLocation::kCallArtMethod:
       call_code_pointer_member(ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArmPointerSize));
       break;
   }
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index f520519..e14089b 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -2477,7 +2477,7 @@
     return;
   }
 
-  if (invoke->GetCodePtrLocation() == HInvokeStaticOrDirect::CodePtrLocation::kCallCriticalNative) {
+  if (invoke->GetCodePtrLocation() == CodePtrLocation::kCallCriticalNative) {
     CriticalNativeCallingConventionVisitorX86 calling_convention_visitor(
         /*for_register_allocation=*/ true);
     CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
@@ -5166,17 +5166,17 @@
     HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
   Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
   switch (invoke->GetMethodLoadKind()) {
-    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
+    case MethodLoadKind::kStringInit: {
       // temp = thread->string_init_entrypoint
       uint32_t offset =
           GetThreadOffset<kX86PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
       __ fs()->movl(temp.AsRegister<Register>(), Address::Absolute(offset));
       break;
     }
-    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
+    case MethodLoadKind::kRecursive:
       callee_method = invoke->GetLocations()->InAt(invoke->GetCurrentMethodIndex());
       break;
-    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
+    case MethodLoadKind::kBootImageLinkTimePcRelative: {
       DCHECK(GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension());
       Register base_reg = GetInvokeStaticOrDirectExtraParameter(invoke,
                                                                 temp.AsRegister<Register>());
@@ -5185,7 +5185,7 @@
       RecordBootImageMethodPatch(invoke);
       break;
     }
-    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo: {
+    case MethodLoadKind::kBootImageRelRo: {
       Register base_reg = GetInvokeStaticOrDirectExtraParameter(invoke,
                                                                 temp.AsRegister<Register>());
       __ movl(temp.AsRegister<Register>(), Address(base_reg, kPlaceholder32BitOffset));
@@ -5194,7 +5194,7 @@
           GetBootImageOffset(invoke));
       break;
     }
-    case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
+    case MethodLoadKind::kBssEntry: {
       Register base_reg = GetInvokeStaticOrDirectExtraParameter(invoke,
                                                                 temp.AsRegister<Register>());
       __ movl(temp.AsRegister<Register>(), Address(base_reg, kPlaceholder32BitOffset));
@@ -5202,21 +5202,21 @@
       // No need for memory fence, thanks to the x86 memory model.
       break;
     }
-    case HInvokeStaticOrDirect::MethodLoadKind::kJitDirectAddress:
+    case MethodLoadKind::kJitDirectAddress:
       __ movl(temp.AsRegister<Register>(), Immediate(invoke->GetMethodAddress()));
       break;
-    case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
+    case MethodLoadKind::kRuntimeCall: {
       GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
       return;  // No code pointer retrieval; the runtime performs the call directly.
     }
   }
 
   switch (invoke->GetCodePtrLocation()) {
-    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
+    case CodePtrLocation::kCallSelf:
       __ call(GetFrameEntryLabel());
       RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
       break;
-    case HInvokeStaticOrDirect::CodePtrLocation::kCallCriticalNative: {
+    case CodePtrLocation::kCallCriticalNative: {
       size_t out_frame_size =
           PrepareCriticalNativeCall<CriticalNativeCallingConventionVisitorX86,
                                     kNativeStackAlignment,
@@ -5265,7 +5265,7 @@
       }
       break;
     }
-    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
+    case CodePtrLocation::kCallArtMethod:
       // (callee_method + offset_of_quick_compiled_code)()
       __ call(Address(callee_method.AsRegister<Register>(),
                       ArtMethod::EntryPointFromQuickCompiledCodeOffset(
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index 210b379..f18fde0 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -1002,51 +1002,51 @@
 
   Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
   switch (invoke->GetMethodLoadKind()) {
-    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
+    case MethodLoadKind::kStringInit: {
       // temp = thread->string_init_entrypoint
       uint32_t offset =
           GetThreadOffset<kX86_64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
       __ gs()->movq(temp.AsRegister<CpuRegister>(), Address::Absolute(offset, /* no_rip= */ true));
       break;
     }
-    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
+    case MethodLoadKind::kRecursive:
       callee_method = invoke->GetLocations()->InAt(invoke->GetCurrentMethodIndex());
       break;
-    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative:
+    case MethodLoadKind::kBootImageLinkTimePcRelative:
       DCHECK(GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension());
       __ leal(temp.AsRegister<CpuRegister>(),
               Address::Absolute(kPlaceholder32BitOffset, /* no_rip= */ false));
       RecordBootImageMethodPatch(invoke);
       break;
-    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo: {
+    case MethodLoadKind::kBootImageRelRo: {
       // Note: Boot image is in the low 4GiB and the entry is 32-bit, so emit a 32-bit load.
       __ movl(temp.AsRegister<CpuRegister>(),
               Address::Absolute(kPlaceholder32BitOffset, /* no_rip= */ false));
       RecordBootImageRelRoPatch(GetBootImageOffset(invoke));
       break;
     }
-    case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
+    case MethodLoadKind::kBssEntry: {
       __ movq(temp.AsRegister<CpuRegister>(),
               Address::Absolute(kPlaceholder32BitOffset, /* no_rip= */ false));
       RecordMethodBssEntryPatch(invoke);
       // No need for memory fence, thanks to the x86-64 memory model.
       break;
     }
-    case HInvokeStaticOrDirect::MethodLoadKind::kJitDirectAddress:
+    case MethodLoadKind::kJitDirectAddress:
       Load64BitValue(temp.AsRegister<CpuRegister>(), invoke->GetMethodAddress());
       break;
-    case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
+    case MethodLoadKind::kRuntimeCall: {
       GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
       return;  // No code pointer retrieval; the runtime performs the call directly.
     }
   }
 
   switch (invoke->GetCodePtrLocation()) {
-    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
+    case CodePtrLocation::kCallSelf:
       __ call(&frame_entry_label_);
       RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
       break;
-    case HInvokeStaticOrDirect::CodePtrLocation::kCallCriticalNative: {
+    case CodePtrLocation::kCallCriticalNative: {
       size_t out_frame_size =
           PrepareCriticalNativeCall<CriticalNativeCallingConventionVisitorX86_64,
                                     kNativeStackAlignment,
@@ -1084,7 +1084,7 @@
       }
       break;
     }
-    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
+    case CodePtrLocation::kCallArtMethod:
       // (callee_method + offset_of_quick_compiled_code)()
       __ call(Address(callee_method.AsRegister<CpuRegister>(),
                       ArtMethod::EntryPointFromQuickCompiledCodeOffset(
@@ -2625,7 +2625,7 @@
     return;
   }
 
-  if (invoke->GetCodePtrLocation() == HInvokeStaticOrDirect::CodePtrLocation::kCallCriticalNative) {
+  if (invoke->GetCodePtrLocation() == CodePtrLocation::kCallCriticalNative) {
     CriticalNativeCallingConventionVisitorX86_64 calling_convention_visitor(
         /*for_register_allocation=*/ true);
     CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
diff --git a/compiler/optimizing/critical_native_abi_fixup_arm.cc b/compiler/optimizing/critical_native_abi_fixup_arm.cc
index 94a9730..bf9233f 100644
--- a/compiler/optimizing/critical_native_abi_fixup_arm.cc
+++ b/compiler/optimizing/critical_native_abi_fixup_arm.cc
@@ -29,8 +29,7 @@
 // Fix up FP arguments passed in core registers for call to @CriticalNative by inserting fake calls
 // to Float.floatToRawIntBits() or Double.doubleToRawLongBits() to satisfy type consistency checks.
 static void FixUpArguments(HInvokeStaticOrDirect* invoke) {
-  DCHECK_EQ(invoke->GetCodePtrLocation(),
-            HInvokeStaticOrDirect::CodePtrLocation::kCallCriticalNative);
+  DCHECK_EQ(invoke->GetCodePtrLocation(), CodePtrLocation::kCallCriticalNative);
   size_t reg = 0u;
   for (size_t i = 0, num_args = invoke->GetNumberOfArguments(); i != num_args; ++i) {
     HInstruction* input = invoke->InputAt(i);
@@ -58,8 +57,8 @@
       }
       // Use arbitrary dispatch info that does not require the method argument.
       HInvokeStaticOrDirect::DispatchInfo dispatch_info = {
-          HInvokeStaticOrDirect::MethodLoadKind::kBssEntry,
-          HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
+          MethodLoadKind::kBssEntry,
+          CodePtrLocation::kCallArtMethod,
           /*method_load_data=*/ 0u
       };
       HBasicBlock* block = invoke->GetBlock();
@@ -98,7 +97,7 @@
       HInstruction* instruction = it.Current();
       if (instruction->IsInvokeStaticOrDirect() &&
           instruction->AsInvokeStaticOrDirect()->GetCodePtrLocation() ==
-              HInvokeStaticOrDirect::CodePtrLocation::kCallCriticalNative) {
+              CodePtrLocation::kCallCriticalNative) {
         FixUpArguments(instruction->AsInvokeStaticOrDirect());
       }
     }
diff --git a/compiler/optimizing/instruction_builder.cc b/compiler/optimizing/instruction_builder.cc
index 4615342..ab28e4b 100644
--- a/compiler/optimizing/instruction_builder.cc
+++ b/compiler/optimizing/instruction_builder.cc
@@ -488,8 +488,8 @@
     // normally use an HInvokeVirtual (sharpen the call).
     MethodReference target_method(dex_file_, method_idx);
     HInvokeStaticOrDirect::DispatchInfo dispatch_info = {
-        HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall,
-        HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
+        MethodLoadKind::kRuntimeCall,
+        CodePtrLocation::kCallArtMethod,
         /* method_load_data= */ 0u
     };
     InvokeType invoke_type = dex_compilation_unit_->IsStatic() ? kStatic : kDirect;
@@ -1041,8 +1041,8 @@
   if (is_string_constructor) {
     uint32_t string_init_entry_point = WellKnownClasses::StringInitToEntryPoint(resolved_method);
     HInvokeStaticOrDirect::DispatchInfo dispatch_info = {
-        HInvokeStaticOrDirect::MethodLoadKind::kStringInit,
-        HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
+        MethodLoadKind::kStringInit,
+        CodePtrLocation::kCallArtMethod,
         dchecked_integral_cast<uint64_t>(string_init_entry_point)
     };
     // We pass null for the resolved_method to ensure optimizations
@@ -1107,8 +1107,7 @@
         HSharpening::SharpenInvokeStaticOrDirect(resolved_method,
                                                  has_method_id,
                                                  code_generator_);
-    if (dispatch_info.code_ptr_location ==
-            HInvokeStaticOrDirect::CodePtrLocation::kCallCriticalNative) {
+    if (dispatch_info.code_ptr_location == CodePtrLocation::kCallCriticalNative) {
       graph_->SetHasDirectCriticalNativeCall(true);
     }
     invoke = new (allocator_) HInvokeStaticOrDirect(allocator_,
diff --git a/compiler/optimizing/intrinsics_utils.h b/compiler/optimizing/intrinsics_utils.h
index d900b21..8c9dd14 100644
--- a/compiler/optimizing/intrinsics_utils.h
+++ b/compiler/optimizing/intrinsics_utils.h
@@ -63,10 +63,9 @@
 
     if (invoke_->IsInvokeStaticOrDirect()) {
       HInvokeStaticOrDirect* invoke_static_or_direct = invoke_->AsInvokeStaticOrDirect();
-      DCHECK_NE(invoke_static_or_direct->GetMethodLoadKind(),
-                HInvokeStaticOrDirect::MethodLoadKind::kRecursive);
+      DCHECK_NE(invoke_static_or_direct->GetMethodLoadKind(), MethodLoadKind::kRecursive);
       DCHECK_NE(invoke_static_or_direct->GetCodePtrLocation(),
-                HInvokeStaticOrDirect::CodePtrLocation::kCallCriticalNative);
+                CodePtrLocation::kCallCriticalNative);
       codegen->GenerateStaticOrDirectCall(invoke_static_or_direct, method_loc, this);
     } else if (invoke_->IsInvokeVirtual()) {
       codegen->GenerateVirtualCall(invoke_->AsInvokeVirtual(), method_loc, this);
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 80d4a2f..93d29cd 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -4372,6 +4372,51 @@
   kCanThrow  // Intrinsic may throw exceptions.
 };
 
+// Determines how to load an ArtMethod*.
+enum class MethodLoadKind {
+  // Use a String init ArtMethod* loaded from Thread entrypoints.
+  kStringInit,
+
+  // Use the method's own ArtMethod* loaded by the register allocator.
+  kRecursive,
+
+  // Use PC-relative boot image ArtMethod* address that will be known at link time.
+  // Used for boot image methods referenced by boot image code.
+  kBootImageLinkTimePcRelative,
+
+  // Load from an entry in the .data.bimg.rel.ro section using a PC-relative load.
+  // Used for app->boot calls with relocatable image.
+  kBootImageRelRo,
+
+  // Load from an entry in the .bss section using a PC-relative load.
+  // Used for methods outside boot image referenced by AOT-compiled app and boot image code.
+  kBssEntry,
+
+  // Use ArtMethod* at a known address, embed the direct address in the code.
+  // Used for for JIT-compiled calls.
+  kJitDirectAddress,
+
+  // Make a runtime call to resolve and call the method. This is the last-resort kind
+  // used when other kinds are unimplemented on a particular architecture.
+  kRuntimeCall,
+};
+
+// Determines the location of the code pointer of an invoke.
+enum class CodePtrLocation {
+  // Recursive call, use local PC-relative call instruction.
+  kCallSelf,
+
+  // Use native pointer from the ArtMethod*.
+  // Used for @CriticalNative to avoid going through the compiled stub. This call goes through
+  // a special resolution stub if the class is not initialized or no native code is registered.
+  kCallCriticalNative,
+
+  // Use code pointer from the ArtMethod*.
+  // Used when we don't know the target code. This is also the last-resort kind used when
+  // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
+  kCallArtMethod,
+};
+
 class HInvoke : public HVariableInputSizeInstruction {
  public:
   bool NeedsEnvironment() const override;
@@ -4587,51 +4632,6 @@
     kLast = kImplicit
   };
 
-  // Determines how to load the target ArtMethod*.
-  enum class MethodLoadKind {
-    // Use a String init ArtMethod* loaded from Thread entrypoints.
-    kStringInit,
-
-    // Use the method's own ArtMethod* loaded by the register allocator.
-    kRecursive,
-
-    // Use PC-relative boot image ArtMethod* address that will be known at link time.
-    // Used for boot image methods referenced by boot image code.
-    kBootImageLinkTimePcRelative,
-
-    // Load from an entry in the .data.bimg.rel.ro using a PC-relative load.
-    // Used for app->boot calls with relocatable image.
-    kBootImageRelRo,
-
-    // Load from an entry in the .bss section using a PC-relative load.
-    // Used for methods outside boot image referenced by AOT-compiled app and boot image code.
-    kBssEntry,
-
-    // Use ArtMethod* at a known address, embed the direct address in the code.
-    // Used for for JIT-compiled calls.
-    kJitDirectAddress,
-
-    // Make a runtime call to resolve and call the method. This is the last-resort-kind
-    // used when other kinds are unimplemented on a particular architecture.
-    kRuntimeCall,
-  };
-
-  // Determines the location of the code pointer.
-  enum class CodePtrLocation {
-    // Recursive call, use local PC-relative call instruction.
-    kCallSelf,
-
-    // Use native pointer from the Artmethod*.
-    // Used for @CriticalNative to avoid going through the compiled stub. This call goes through
-    // a special resolution stub if the class is not initialized or no native code is registered.
-    kCallCriticalNative,
-
-    // Use code pointer from the ArtMethod*.
-    // Used when we don't know the target code. This is also the last-resort-kind used when
-    // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
-    kCallArtMethod,
-  };
-
   struct DispatchInfo {
     MethodLoadKind method_load_kind;
     CodePtrLocation code_ptr_location;
@@ -4858,8 +4858,8 @@
   const MethodReference resolved_method_reference_;
   DispatchInfo dispatch_info_;
 };
-std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs);
-std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::CodePtrLocation rhs);
+std::ostream& operator<<(std::ostream& os, MethodLoadKind rhs);
+std::ostream& operator<<(std::ostream& os, CodePtrLocation rhs);
 std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);
 
 class HInvokeVirtual final : public HInvoke {
diff --git a/compiler/optimizing/sharpening.cc b/compiler/optimizing/sharpening.cc
index f658f8a..67cd200 100644
--- a/compiler/optimizing/sharpening.cc
+++ b/compiler/optimizing/sharpening.cc
@@ -65,8 +65,8 @@
     DCHECK(!(callee->IsConstructor() && callee->GetDeclaringClass()->IsStringClass()));
   }
 
-  HInvokeStaticOrDirect::MethodLoadKind method_load_kind;
-  HInvokeStaticOrDirect::CodePtrLocation code_ptr_location;
+  MethodLoadKind method_load_kind;
+  CodePtrLocation code_ptr_location;
   uint64_t method_load_data = 0u;
 
   // Note: we never call an ArtMethod through a known code pointer, as
@@ -85,61 +85,60 @@
   const CompilerOptions& compiler_options = codegen->GetCompilerOptions();
   if (callee == codegen->GetGraph()->GetArtMethod() && !codegen->GetGraph()->IsDebuggable()) {
     // Recursive call.
-    method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kRecursive;
-    code_ptr_location = HInvokeStaticOrDirect::CodePtrLocation::kCallSelf;
+    method_load_kind = MethodLoadKind::kRecursive;
+    code_ptr_location = CodePtrLocation::kCallSelf;
   } else if (compiler_options.IsBootImage() || compiler_options.IsBootImageExtension()) {
     if (!compiler_options.GetCompilePic()) {
       // Test configuration, do not sharpen.
-      method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall;
+      method_load_kind = MethodLoadKind::kRuntimeCall;
     } else if (IsInBootImage(callee)) {
       DCHECK(compiler_options.IsBootImageExtension());
-      method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo;
+      method_load_kind = MethodLoadKind::kBootImageRelRo;
     } else if (BootImageAOTCanEmbedMethod(callee, compiler_options)) {
-      method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative;
+      method_load_kind = MethodLoadKind::kBootImageLinkTimePcRelative;
     } else if (!has_method_id) {
-      method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall;
+      method_load_kind = MethodLoadKind::kRuntimeCall;
     } else {
       // Use PC-relative access to the .bss methods array.
-      method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kBssEntry;
+      method_load_kind = MethodLoadKind::kBssEntry;
     }
-    code_ptr_location = HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod;
+    code_ptr_location = CodePtrLocation::kCallArtMethod;
   } else if (compiler_options.IsJitCompiler()) {
     ScopedObjectAccess soa(Thread::Current());
     if (Runtime::Current()->GetJit()->CanEncodeMethod(
             callee,
             compiler_options.IsJitCompilerForSharedCode())) {
-      method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kJitDirectAddress;
+      method_load_kind = MethodLoadKind::kJitDirectAddress;
       method_load_data = reinterpret_cast<uintptr_t>(callee);
-      code_ptr_location = HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod;
+      code_ptr_location = CodePtrLocation::kCallArtMethod;
     } else {
       // Do not sharpen.
-      method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall;
-      code_ptr_location = HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod;
+      method_load_kind = MethodLoadKind::kRuntimeCall;
+      code_ptr_location = CodePtrLocation::kCallArtMethod;
     }
   } else if (IsInBootImage(callee)) {
     // Use PC-relative access to the .data.bimg.rel.ro methods array.
-    method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo;
-    code_ptr_location = HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod;
+    method_load_kind = MethodLoadKind::kBootImageRelRo;
+    code_ptr_location = CodePtrLocation::kCallArtMethod;
   } else if (!has_method_id) {
-    method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall;
-    code_ptr_location = HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod;
+    method_load_kind = MethodLoadKind::kRuntimeCall;
+    code_ptr_location = CodePtrLocation::kCallArtMethod;
   } else {
     // Use PC-relative access to the .bss methods array.
-    method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kBssEntry;
-    code_ptr_location = HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod;
+    method_load_kind = MethodLoadKind::kBssEntry;
+    code_ptr_location = CodePtrLocation::kCallArtMethod;
   }
 
-  if (method_load_kind != HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall &&
-      callee->IsCriticalNative()) {
-    DCHECK_NE(method_load_kind, HInvokeStaticOrDirect::MethodLoadKind::kRecursive);
+  if (method_load_kind != MethodLoadKind::kRuntimeCall && callee->IsCriticalNative()) {
+    DCHECK_NE(method_load_kind, MethodLoadKind::kRecursive);
     DCHECK(callee->IsStatic());
-    code_ptr_location = HInvokeStaticOrDirect::CodePtrLocation::kCallCriticalNative;
+    code_ptr_location = CodePtrLocation::kCallCriticalNative;
   }
 
   if (codegen->GetGraph()->IsDebuggable()) {
     // For debuggable apps always use the code pointer from ArtMethod
     // so that we don't circumvent instrumentation stubs if installed.
-    code_ptr_location = HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod;
+    code_ptr_location = CodePtrLocation::kCallArtMethod;
   }
 
   HInvokeStaticOrDirect::DispatchInfo desired_dispatch_info = {