author Vladimir Marko <vmarko@google.com> 2017-12-15 10:08:11 +0000
committer Gerrit Code Review <noreply-gerritcodereview@google.com> 2017-12-15 10:08:11 +0000
commit fe8a8975cd843c9a7922aaf0badae2d47562e9cd (patch)
tree afaa12ffee3f70785607975c59f47e43111df61c /compiler/optimizing
parent c8d910399cfd33550c497cc3e1e05b0396903234 (diff)
parent 8758454d380a2b0de1f4a99e9623cfac5460ccdf (diff)
Merge changes Ib1381084,Icb2a838f
* changes:
  Clean up InstanceOf/CheckCast.
  X86: Clean up interface type check for heap poisoning.
Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/code_generator.cc          |   9
-rw-r--r--  compiler/optimizing/code_generator.h           |  51
-rw-r--r--  compiler/optimizing/code_generator_arm64.cc    |  66
-rw-r--r--  compiler/optimizing/code_generator_arm_vixl.cc |  66
-rw-r--r--  compiler/optimizing/code_generator_x86.cc      | 144
-rw-r--r--  compiler/optimizing/code_generator_x86_64.cc   | 141
-rw-r--r--  compiler/optimizing/nodes.h                    |  20
7 files changed, 236 insertions, 261 deletions
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index 45eec6d744..dee74e96dc 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -295,15 +295,6 @@ void CodeGenerator::EmitJitRootPatches(uint8_t* code ATTRIBUTE_UNUSED,
DCHECK_EQ(code_generation_data_->GetNumberOfJitClassRoots(), 0u);
}
-size_t CodeGenerator::GetCacheOffset(uint32_t index) {
- return sizeof(GcRoot<mirror::Object>) * index;
-}
-
-size_t CodeGenerator::GetCachePointerOffset(uint32_t index) {
- PointerSize pointer_size = InstructionSetPointerSize(GetInstructionSet());
- return static_cast<size_t>(pointer_size) * index;
-}
-
uint32_t CodeGenerator::GetArrayLengthOffset(HArrayLength* array_length) {
return array_length->IsStringLength()
? mirror::String::CountOffset().Uint32Value()
diff --git a/compiler/optimizing/code_generator.h b/compiler/optimizing/code_generator.h
index 08e4462356..3c5a37f958 100644
--- a/compiler/optimizing/code_generator.h
+++ b/compiler/optimizing/code_generator.h
@@ -388,13 +388,6 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
bool IsBlockedCoreRegister(size_t i) { return blocked_core_registers_[i]; }
bool IsBlockedFloatingPointRegister(size_t i) { return blocked_fpu_registers_[i]; }
- // Helper that returns the pointer offset of an index in an object array.
- // Note: this method assumes we always have the same pointer size, regardless
- // of the architecture.
- static size_t GetCacheOffset(uint32_t index);
- // Pointer variant for ArtMethod and ArtField arrays.
- size_t GetCachePointerOffset(uint32_t index);
-
// Helper that returns the offset of the array's length field.
// Note: Besides the normal arrays, we also use the HArrayLength for
// accessing the String's `count` field in String intrinsics.
@@ -412,6 +405,50 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
Location to2,
DataType::Type type2);
+ static bool InstanceOfNeedsReadBarrier(HInstanceOf* instance_of) {
+ // Used only for kExactCheck, kAbstractClassCheck, kClassHierarchyCheck and kArrayObjectCheck.
+ DCHECK(instance_of->GetTypeCheckKind() == TypeCheckKind::kExactCheck ||
+ instance_of->GetTypeCheckKind() == TypeCheckKind::kAbstractClassCheck ||
+ instance_of->GetTypeCheckKind() == TypeCheckKind::kClassHierarchyCheck ||
+ instance_of->GetTypeCheckKind() == TypeCheckKind::kArrayObjectCheck)
+ << instance_of->GetTypeCheckKind();
+ // If the target class is in the boot image, it's non-moveable and it doesn't matter
+ // if we compare it with a from-space or to-space reference, the result is the same.
+ // It's OK to traverse a class hierarchy jumping between from-space and to-space.
+ return kEmitCompilerReadBarrier && !instance_of->GetTargetClass()->IsInBootImage();
+ }
+
+ static ReadBarrierOption ReadBarrierOptionForInstanceOf(HInstanceOf* instance_of) {
+ return InstanceOfNeedsReadBarrier(instance_of) ? kWithReadBarrier : kWithoutReadBarrier;
+ }
+
+ static bool IsTypeCheckSlowPathFatal(HCheckCast* check_cast) {
+ switch (check_cast->GetTypeCheckKind()) {
+ case TypeCheckKind::kExactCheck:
+ case TypeCheckKind::kAbstractClassCheck:
+ case TypeCheckKind::kClassHierarchyCheck:
+ case TypeCheckKind::kArrayObjectCheck:
+ case TypeCheckKind::kInterfaceCheck: {
+ bool needs_read_barrier =
+ kEmitCompilerReadBarrier && !check_cast->GetTargetClass()->IsInBootImage();
+ // We do not emit read barriers for HCheckCast, so we can get false negatives
+ // and the slow path shall re-check and simply return if the cast is actually OK.
+ return !needs_read_barrier;
+ }
+ case TypeCheckKind::kArrayCheck:
+ case TypeCheckKind::kUnresolvedCheck:
+ return false;
+ }
+ LOG(FATAL) << "Unreachable";
+ UNREACHABLE();
+ }
+
+ static LocationSummary::CallKind GetCheckCastCallKind(HCheckCast* check_cast) {
+ return (IsTypeCheckSlowPathFatal(check_cast) && !check_cast->CanThrowIntoCatchBlock())
+ ? LocationSummary::kNoCall // In fact, call on a fatal (non-returning) slow path.
+ : LocationSummary::kCallOnSlowPath;
+ }
+
static bool StoreNeedsWriteBarrier(DataType::Type type, HInstruction* value) {
// Check that null value is not represented as an integer constant.
DCHECK(type != DataType::Type::kReference || !value->IsIntConstant());
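The helpers added above centralize policy that every backend previously re-derived locally. A minimal sketch of the intended call pattern, assuming the ART types from this header and a hypothetical backend `XYZ`; it mirrors the per-architecture hunks below rather than adding behavior:

  // Sketch only: how a backend consumes the new helpers (compare the
  // arm64/arm/x86/x86-64 hunks below).
  void LocationsBuilderXYZ::VisitCheckCast(HCheckCast* instruction) {
    // One call replaces the old per-backend switch over TypeCheckKind.
    LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
    LocationSummary* locations =
        new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
    locations->SetInAt(0, Location::RequiresRegister());
    // ... inputs and temps as before ...
  }

  void InstructionCodeGeneratorXYZ::VisitInstanceOf(HInstanceOf* instruction) {
    // Boot-image target classes cannot move, so the first four check kinds
    // may skip the read barrier on the class-hierarchy walk.
    ReadBarrierOption read_barrier_option =
        CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
    // ... pass read_barrier_option to GenerateReferenceLoadTwoRegisters(...) ...
  }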
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 5054a299d3..f9dcb5d6ef 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -478,7 +478,7 @@ class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
__ Bind(GetEntryLabel());
- if (!is_fatal_) {
+ if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
SaveLiveRegisters(codegen, locations);
}
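The relaxed condition is needed because CodeGenerator::IsTypeCheckSlowPathFatal() no longer folds in CanThrowIntoCatchBlock(): a check cast can be classified fatal yet still throw into a catch handler in the same frame, and that handler needs the saved register state. A one-line restatement of the invariant (illustrative only, not part of the change):

  // Live registers must be preserved exactly when execution can resume in
  // the current frame: either the slow path returns (non-fatal case) or the
  // thrown exception is caught by this very method.
  bool MustSaveLiveRegisters(bool is_fatal, bool can_throw_into_catch) {
    return !is_fatal || can_throw_into_catch;
  }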
@@ -3822,11 +3822,12 @@ void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
case TypeCheckKind::kExactCheck:
case TypeCheckKind::kAbstractClassCheck:
case TypeCheckKind::kClassHierarchyCheck:
- case TypeCheckKind::kArrayObjectCheck:
- call_kind =
- kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
- baker_read_barrier_slow_path = kUseBakerReadBarrier;
+ case TypeCheckKind::kArrayObjectCheck: {
+ bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
+ call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
+ baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
break;
+ }
case TypeCheckKind::kArrayCheck:
case TypeCheckKind::kUnresolvedCheck:
case TypeCheckKind::kInterfaceCheck:
@@ -3875,13 +3876,15 @@ void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
switch (type_check_kind) {
case TypeCheckKind::kExactCheck: {
+ ReadBarrierOption read_barrier_option =
+ CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
// /* HeapReference<Class> */ out = obj->klass_
GenerateReferenceLoadTwoRegisters(instruction,
out_loc,
obj_loc,
class_offset,
maybe_temp_loc,
- kCompilerReadBarrierOption);
+ read_barrier_option);
__ Cmp(out, cls);
__ Cset(out, eq);
if (zero.IsLinked()) {
@@ -3891,13 +3894,15 @@ void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
}
case TypeCheckKind::kAbstractClassCheck: {
+ ReadBarrierOption read_barrier_option =
+ CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
// /* HeapReference<Class> */ out = obj->klass_
GenerateReferenceLoadTwoRegisters(instruction,
out_loc,
obj_loc,
class_offset,
maybe_temp_loc,
- kCompilerReadBarrierOption);
+ read_barrier_option);
// If the class is abstract, we eagerly fetch the super class of the
// object to avoid doing a comparison we know will fail.
vixl::aarch64::Label loop, success;
@@ -3907,7 +3912,7 @@ void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
out_loc,
super_offset,
maybe_temp_loc,
- kCompilerReadBarrierOption);
+ read_barrier_option);
// If `out` is null, we use it for the result, and jump to `done`.
__ Cbz(out, &done);
__ Cmp(out, cls);
@@ -3920,13 +3925,15 @@ void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
}
case TypeCheckKind::kClassHierarchyCheck: {
+ ReadBarrierOption read_barrier_option =
+ CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
// /* HeapReference<Class> */ out = obj->klass_
GenerateReferenceLoadTwoRegisters(instruction,
out_loc,
obj_loc,
class_offset,
maybe_temp_loc,
- kCompilerReadBarrierOption);
+ read_barrier_option);
// Walk over the class hierarchy to find a match.
vixl::aarch64::Label loop, success;
__ Bind(&loop);
@@ -3937,7 +3944,7 @@ void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
out_loc,
super_offset,
maybe_temp_loc,
- kCompilerReadBarrierOption);
+ read_barrier_option);
__ Cbnz(out, &loop);
// If `out` is null, we use it for the result, and jump to `done`.
__ B(&done);
@@ -3950,13 +3957,15 @@ void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
}
case TypeCheckKind::kArrayObjectCheck: {
+ ReadBarrierOption read_barrier_option =
+ CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
// /* HeapReference<Class> */ out = obj->klass_
GenerateReferenceLoadTwoRegisters(instruction,
out_loc,
obj_loc,
class_offset,
maybe_temp_loc,
- kCompilerReadBarrierOption);
+ read_barrier_option);
// Do an exact check.
vixl::aarch64::Label exact_check;
__ Cmp(out, cls);
@@ -3967,7 +3976,7 @@ void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
out_loc,
component_offset,
maybe_temp_loc,
- kCompilerReadBarrierOption);
+ read_barrier_option);
// If `out` is null, we use it for the result, and jump to `done`.
__ Cbz(out, &done);
__ Ldrh(out, HeapOperand(out, primitive_offset));
@@ -4048,26 +4057,8 @@ void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
}
void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
- LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
- bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
-
TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
- switch (type_check_kind) {
- case TypeCheckKind::kExactCheck:
- case TypeCheckKind::kAbstractClassCheck:
- case TypeCheckKind::kClassHierarchyCheck:
- case TypeCheckKind::kArrayObjectCheck:
- call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
- LocationSummary::kCallOnSlowPath :
- LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
- break;
- case TypeCheckKind::kArrayCheck:
- case TypeCheckKind::kUnresolvedCheck:
- case TypeCheckKind::kInterfaceCheck:
- call_kind = LocationSummary::kCallOnSlowPath;
- break;
- }
-
+ LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
LocationSummary* locations =
new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
locations->SetInAt(0, Location::RequiresRegister());
@@ -4098,18 +4089,7 @@ void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
const uint32_t object_array_data_offset =
mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
- bool is_type_check_slow_path_fatal = false;
- // Always false for read barriers since we may need to go to the entrypoint for non-fatal cases
- // from false negatives. The false negatives may come from avoiding read barriers below. Avoiding
- // read barriers is done for performance and code size reasons.
- if (!kEmitCompilerReadBarrier) {
- is_type_check_slow_path_fatal =
- (type_check_kind == TypeCheckKind::kExactCheck ||
- type_check_kind == TypeCheckKind::kAbstractClassCheck ||
- type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
- type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
- !instruction->CanThrowIntoCatchBlock();
- }
+ bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
SlowPathCodeARM64* type_check_slow_path =
new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARM64(
instruction, is_type_check_slow_path_fatal);
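Why the read barrier is skippable here (per InstanceOfNeedsReadBarrier() above): a boot-image class never moves, so its from-space and to-space addresses coincide, and pointer equality against it is barrier-insensitive. A toy demonstration under that assumption, with integers standing in for addresses (not ART code):

  #include <cassert>

  struct Copies { int from_space, to_space; };  // stand-ins for class addresses

  int main() {
    Copies target{100, 100};  // boot-image class: both copies coincide
    Copies other{200, 300};   // moving class: a stale from-space copy exists
    // Either copy of `other` compares unequal to the target, and either copy
    // of the target compares equal to it, so eliding the read barrier cannot
    // flip the result of the comparison.
    assert((other.from_space == target.to_space) == (other.to_space == target.to_space));
    assert((target.from_space == target.to_space) == (target.to_space == target.to_space));
    return 0;
  }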
diff --git a/compiler/optimizing/code_generator_arm_vixl.cc b/compiler/optimizing/code_generator_arm_vixl.cc
index 3f8f0c44f3..017598d484 100644
--- a/compiler/optimizing/code_generator_arm_vixl.cc
+++ b/compiler/optimizing/code_generator_arm_vixl.cc
@@ -619,7 +619,7 @@ class TypeCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
__ Bind(GetEntryLabel());
- if (!is_fatal_) {
+ if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
SaveLiveRegisters(codegen, locations);
}
@@ -7377,11 +7377,12 @@ void LocationsBuilderARMVIXL::VisitInstanceOf(HInstanceOf* instruction) {
case TypeCheckKind::kExactCheck:
case TypeCheckKind::kAbstractClassCheck:
case TypeCheckKind::kClassHierarchyCheck:
- case TypeCheckKind::kArrayObjectCheck:
- call_kind =
- kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
- baker_read_barrier_slow_path = kUseBakerReadBarrier;
+ case TypeCheckKind::kArrayObjectCheck: {
+ bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
+ call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
+ baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
break;
+ }
case TypeCheckKind::kArrayCheck:
case TypeCheckKind::kUnresolvedCheck:
case TypeCheckKind::kInterfaceCheck:
@@ -7434,13 +7435,15 @@ void InstructionCodeGeneratorARMVIXL::VisitInstanceOf(HInstanceOf* instruction)
switch (type_check_kind) {
case TypeCheckKind::kExactCheck: {
+ ReadBarrierOption read_barrier_option =
+ CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
// /* HeapReference<Class> */ out = obj->klass_
GenerateReferenceLoadTwoRegisters(instruction,
out_loc,
obj_loc,
class_offset,
maybe_temp_loc,
- kCompilerReadBarrierOption);
+ read_barrier_option);
// Classes must be equal for the instanceof to succeed.
__ Cmp(out, cls);
// We speculatively set the result to false without changing the condition
@@ -7467,13 +7470,15 @@ void InstructionCodeGeneratorARMVIXL::VisitInstanceOf(HInstanceOf* instruction)
}
case TypeCheckKind::kAbstractClassCheck: {
+ ReadBarrierOption read_barrier_option =
+ CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
// /* HeapReference<Class> */ out = obj->klass_
GenerateReferenceLoadTwoRegisters(instruction,
out_loc,
obj_loc,
class_offset,
maybe_temp_loc,
- kCompilerReadBarrierOption);
+ read_barrier_option);
// If the class is abstract, we eagerly fetch the super class of the
// object to avoid doing a comparison we know will fail.
vixl32::Label loop;
@@ -7483,7 +7488,7 @@ void InstructionCodeGeneratorARMVIXL::VisitInstanceOf(HInstanceOf* instruction)
out_loc,
super_offset,
maybe_temp_loc,
- kCompilerReadBarrierOption);
+ read_barrier_option);
// If `out` is null, we use it for the result, and jump to the final label.
__ CompareAndBranchIfZero(out, final_label, /* far_target */ false);
__ Cmp(out, cls);
@@ -7493,13 +7498,15 @@ void InstructionCodeGeneratorARMVIXL::VisitInstanceOf(HInstanceOf* instruction)
}
case TypeCheckKind::kClassHierarchyCheck: {
+ ReadBarrierOption read_barrier_option =
+ CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
// /* HeapReference<Class> */ out = obj->klass_
GenerateReferenceLoadTwoRegisters(instruction,
out_loc,
obj_loc,
class_offset,
maybe_temp_loc,
- kCompilerReadBarrierOption);
+ read_barrier_option);
// Walk over the class hierarchy to find a match.
vixl32::Label loop, success;
__ Bind(&loop);
@@ -7510,7 +7517,7 @@ void InstructionCodeGeneratorARMVIXL::VisitInstanceOf(HInstanceOf* instruction)
out_loc,
super_offset,
maybe_temp_loc,
- kCompilerReadBarrierOption);
+ read_barrier_option);
// This is essentially a null check, but it sets the condition flags to the
// proper value for the code that follows the loop, i.e. not `eq`.
__ Cmp(out, 1);
@@ -7547,13 +7554,15 @@ void InstructionCodeGeneratorARMVIXL::VisitInstanceOf(HInstanceOf* instruction)
}
case TypeCheckKind::kArrayObjectCheck: {
+ ReadBarrierOption read_barrier_option =
+ CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
// /* HeapReference<Class> */ out = obj->klass_
GenerateReferenceLoadTwoRegisters(instruction,
out_loc,
obj_loc,
class_offset,
maybe_temp_loc,
- kCompilerReadBarrierOption);
+ read_barrier_option);
// Do an exact check.
vixl32::Label exact_check;
__ Cmp(out, cls);
@@ -7564,7 +7573,7 @@ void InstructionCodeGeneratorARMVIXL::VisitInstanceOf(HInstanceOf* instruction)
out_loc,
component_offset,
maybe_temp_loc,
- kCompilerReadBarrierOption);
+ read_barrier_option);
// If `out` is null, we use it for the result, and jump to the final label.
__ CompareAndBranchIfZero(out, final_label, /* far_target */ false);
GetAssembler()->LoadFromOffset(kLoadUnsignedHalfword, out, out, primitive_offset);
@@ -7654,26 +7663,8 @@ void InstructionCodeGeneratorARMVIXL::VisitInstanceOf(HInstanceOf* instruction)
}
void LocationsBuilderARMVIXL::VisitCheckCast(HCheckCast* instruction) {
- LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
- bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
-
TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
- switch (type_check_kind) {
- case TypeCheckKind::kExactCheck:
- case TypeCheckKind::kAbstractClassCheck:
- case TypeCheckKind::kClassHierarchyCheck:
- case TypeCheckKind::kArrayObjectCheck:
- call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
- LocationSummary::kCallOnSlowPath :
- LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
- break;
- case TypeCheckKind::kArrayCheck:
- case TypeCheckKind::kUnresolvedCheck:
- case TypeCheckKind::kInterfaceCheck:
- call_kind = LocationSummary::kCallOnSlowPath;
- break;
- }
-
+ LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
LocationSummary* locations =
new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
locations->SetInAt(0, Location::RequiresRegister());
@@ -7702,18 +7693,7 @@ void InstructionCodeGeneratorARMVIXL::VisitCheckCast(HCheckCast* instruction) {
const uint32_t object_array_data_offset =
mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
- // Always false for read barriers since we may need to go to the entrypoint for non-fatal cases
- // from false negatives. The false negatives may come from avoiding read barriers below. Avoiding
- // read barriers is done for performance and code size reasons.
- bool is_type_check_slow_path_fatal = false;
- if (!kEmitCompilerReadBarrier) {
- is_type_check_slow_path_fatal =
- (type_check_kind == TypeCheckKind::kExactCheck ||
- type_check_kind == TypeCheckKind::kAbstractClassCheck ||
- type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
- type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
- !instruction->CanThrowIntoCatchBlock();
- }
+ bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
SlowPathCodeARMVIXL* type_check_slow_path =
new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARMVIXL(
instruction, is_type_check_slow_path_fatal);
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 42ee9db167..ba222fe532 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -317,7 +317,14 @@ class TypeCheckSlowPathX86 : public SlowPathCode {
CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
__ Bind(GetEntryLabel());
- if (!is_fatal_) {
+ if (kPoisonHeapReferences &&
+ instruction_->IsCheckCast() &&
+ instruction_->AsCheckCast()->GetTypeCheckKind() == TypeCheckKind::kInterfaceCheck) {
+ // First, unpoison the `cls` reference that was poisoned for direct memory comparison.
+ __ UnpoisonHeapReference(locations->InAt(1).AsRegister<Register>());
+ }
+
+ if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
SaveLiveRegisters(codegen, locations);
}
@@ -6386,7 +6393,7 @@ static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
// interface pointer, one for loading the current interface.
// The other checks have one temp for loading the object's class.
static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
- if (type_check_kind == TypeCheckKind::kInterfaceCheck && !kPoisonHeapReferences) {
+ if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
return 2;
}
return 1 + NumberOfInstanceOfTemps(type_check_kind);
@@ -6400,11 +6407,12 @@ void LocationsBuilderX86::VisitInstanceOf(HInstanceOf* instruction) {
case TypeCheckKind::kExactCheck:
case TypeCheckKind::kAbstractClassCheck:
case TypeCheckKind::kClassHierarchyCheck:
- case TypeCheckKind::kArrayObjectCheck:
- call_kind =
- kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
- baker_read_barrier_slow_path = kUseBakerReadBarrier;
+ case TypeCheckKind::kArrayObjectCheck: {
+ bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
+ call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
+ baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
break;
+ }
case TypeCheckKind::kArrayCheck:
case TypeCheckKind::kUnresolvedCheck:
case TypeCheckKind::kInterfaceCheck:
@@ -6452,12 +6460,14 @@ void InstructionCodeGeneratorX86::VisitInstanceOf(HInstanceOf* instruction) {
switch (type_check_kind) {
case TypeCheckKind::kExactCheck: {
+ ReadBarrierOption read_barrier_option =
+ CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
// /* HeapReference<Class> */ out = obj->klass_
GenerateReferenceLoadTwoRegisters(instruction,
out_loc,
obj_loc,
class_offset,
- kCompilerReadBarrierOption);
+ read_barrier_option);
if (cls.IsRegister()) {
__ cmpl(out, cls.AsRegister<Register>());
} else {
@@ -6473,12 +6483,14 @@ void InstructionCodeGeneratorX86::VisitInstanceOf(HInstanceOf* instruction) {
}
case TypeCheckKind::kAbstractClassCheck: {
+ ReadBarrierOption read_barrier_option =
+ CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
// /* HeapReference<Class> */ out = obj->klass_
GenerateReferenceLoadTwoRegisters(instruction,
out_loc,
obj_loc,
class_offset,
- kCompilerReadBarrierOption);
+ read_barrier_option);
// If the class is abstract, we eagerly fetch the super class of the
// object to avoid doing a comparison we know will fail.
NearLabel loop;
@@ -6488,7 +6500,7 @@ void InstructionCodeGeneratorX86::VisitInstanceOf(HInstanceOf* instruction) {
out_loc,
super_offset,
maybe_temp_loc,
- kCompilerReadBarrierOption);
+ read_barrier_option);
__ testl(out, out);
// If `out` is null, we use it for the result, and jump to `done`.
__ j(kEqual, &done);
@@ -6507,12 +6519,14 @@ void InstructionCodeGeneratorX86::VisitInstanceOf(HInstanceOf* instruction) {
}
case TypeCheckKind::kClassHierarchyCheck: {
+ ReadBarrierOption read_barrier_option =
+ CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
// /* HeapReference<Class> */ out = obj->klass_
GenerateReferenceLoadTwoRegisters(instruction,
out_loc,
obj_loc,
class_offset,
- kCompilerReadBarrierOption);
+ read_barrier_option);
// Walk over the class hierarchy to find a match.
NearLabel loop, success;
__ Bind(&loop);
@@ -6528,7 +6542,7 @@ void InstructionCodeGeneratorX86::VisitInstanceOf(HInstanceOf* instruction) {
out_loc,
super_offset,
maybe_temp_loc,
- kCompilerReadBarrierOption);
+ read_barrier_option);
__ testl(out, out);
__ j(kNotEqual, &loop);
// If `out` is null, we use it for the result, and jump to `done`.
@@ -6542,12 +6556,14 @@ void InstructionCodeGeneratorX86::VisitInstanceOf(HInstanceOf* instruction) {
}
case TypeCheckKind::kArrayObjectCheck: {
+ ReadBarrierOption read_barrier_option =
+ CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
// /* HeapReference<Class> */ out = obj->klass_
GenerateReferenceLoadTwoRegisters(instruction,
out_loc,
obj_loc,
class_offset,
- kCompilerReadBarrierOption);
+ read_barrier_option);
// Do an exact check.
NearLabel exact_check;
if (cls.IsRegister()) {
@@ -6563,7 +6579,7 @@ void InstructionCodeGeneratorX86::VisitInstanceOf(HInstanceOf* instruction) {
out_loc,
component_offset,
maybe_temp_loc,
- kCompilerReadBarrierOption);
+ read_barrier_option);
__ testl(out, out);
// If `out` is null, we use it for the result, and jump to `done`.
__ j(kEqual, &done);
@@ -6647,30 +6663,9 @@ void InstructionCodeGeneratorX86::VisitInstanceOf(HInstanceOf* instruction) {
}
}
-static bool IsTypeCheckSlowPathFatal(TypeCheckKind type_check_kind, bool throws_into_catch) {
- switch (type_check_kind) {
- case TypeCheckKind::kExactCheck:
- case TypeCheckKind::kAbstractClassCheck:
- case TypeCheckKind::kClassHierarchyCheck:
- case TypeCheckKind::kArrayObjectCheck:
- return !throws_into_catch && !kEmitCompilerReadBarrier;
- case TypeCheckKind::kInterfaceCheck:
- return !throws_into_catch && !kEmitCompilerReadBarrier && !kPoisonHeapReferences;
- case TypeCheckKind::kArrayCheck:
- case TypeCheckKind::kUnresolvedCheck:
- return false;
- }
- LOG(FATAL) << "Unreachable";
- UNREACHABLE();
-}
-
void LocationsBuilderX86::VisitCheckCast(HCheckCast* instruction) {
- bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
- LocationSummary::CallKind call_kind =
- IsTypeCheckSlowPathFatal(type_check_kind, throws_into_catch)
- ? LocationSummary::kNoCall
- : LocationSummary::kCallOnSlowPath;
+ LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
LocationSummary* locations =
new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
locations->SetInAt(0, Location::RequiresRegister());
@@ -6708,12 +6703,7 @@ void InstructionCodeGeneratorX86::VisitCheckCast(HCheckCast* instruction) {
const uint32_t object_array_data_offset =
mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
- // Always false for read barriers since we may need to go to the entrypoint for non-fatal cases
- // from false negatives. The false negatives may come from avoiding read barriers below. Avoiding
- // read barriers is done for performance and code size reasons.
- bool is_type_check_slow_path_fatal =
- IsTypeCheckSlowPathFatal(type_check_kind, instruction->CanThrowIntoCatchBlock());
-
+ bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
SlowPathCode* type_check_slow_path =
new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86(
instruction, is_type_check_slow_path_fatal);
@@ -6866,44 +6856,40 @@ void InstructionCodeGeneratorX86::VisitCheckCast(HCheckCast* instruction) {
break;
case TypeCheckKind::kInterfaceCheck: {
- // Fast path for the interface check. Since we compare with a memory location in the inner
- // loop we would need to have cls poisoned. However unpoisoning cls would reset the
- // conditional flags and cause the conditional jump to be incorrect. Therefore we just jump
- // to the slow path if we are running under poisoning.
- if (!kPoisonHeapReferences) {
- // Try to avoid read barriers to improve the fast path. We can not get false positives by
- // doing this.
- // /* HeapReference<Class> */ temp = obj->klass_
- GenerateReferenceLoadTwoRegisters(instruction,
- temp_loc,
- obj_loc,
- class_offset,
- kWithoutReadBarrier);
-
- // /* HeapReference<Class> */ temp = temp->iftable_
- GenerateReferenceLoadTwoRegisters(instruction,
- temp_loc,
- temp_loc,
- iftable_offset,
- kWithoutReadBarrier);
- // Iftable is never null.
- __ movl(maybe_temp2_loc.AsRegister<Register>(), Address(temp, array_length_offset));
- // Loop through the iftable and check if any class matches.
- NearLabel start_loop;
- __ Bind(&start_loop);
- // Need to subtract first to handle the empty array case.
- __ subl(maybe_temp2_loc.AsRegister<Register>(), Immediate(2));
- __ j(kNegative, type_check_slow_path->GetEntryLabel());
- // Go to next interface if the classes do not match.
- __ cmpl(cls.AsRegister<Register>(),
- CodeGeneratorX86::ArrayAddress(temp,
- maybe_temp2_loc,
- TIMES_4,
- object_array_data_offset));
- __ j(kNotEqual, &start_loop);
- } else {
- __ jmp(type_check_slow_path->GetEntryLabel());
- }
+ // Fast path for the interface check. Try to avoid read barriers to improve the fast path.
+ // We can not get false positives by doing this.
+ // /* HeapReference<Class> */ temp = obj->klass_
+ GenerateReferenceLoadTwoRegisters(instruction,
+ temp_loc,
+ obj_loc,
+ class_offset,
+ kWithoutReadBarrier);
+
+ // /* HeapReference<Class> */ temp = temp->iftable_
+ GenerateReferenceLoadTwoRegisters(instruction,
+ temp_loc,
+ temp_loc,
+ iftable_offset,
+ kWithoutReadBarrier);
+ // Iftable is never null.
+ __ movl(maybe_temp2_loc.AsRegister<Register>(), Address(temp, array_length_offset));
+ // Maybe poison the `cls` for direct comparison with memory.
+ __ MaybePoisonHeapReference(cls.AsRegister<Register>());
+ // Loop through the iftable and check if any class matches.
+ NearLabel start_loop;
+ __ Bind(&start_loop);
+ // Need to subtract first to handle the empty array case.
+ __ subl(maybe_temp2_loc.AsRegister<Register>(), Immediate(2));
+ __ j(kNegative, type_check_slow_path->GetEntryLabel());
+ // Go to next interface if the classes do not match.
+ __ cmpl(cls.AsRegister<Register>(),
+ CodeGeneratorX86::ArrayAddress(temp,
+ maybe_temp2_loc,
+ TIMES_4,
+ object_array_data_offset));
+ __ j(kNotEqual, &start_loop);
+ // If `cls` was poisoned above, unpoison it.
+ __ MaybeUnpoisonHeapReference(cls.AsRegister<Register>());
break;
}
}
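What makes the poisoned fast path above sound: `cls` is poisoned once before the loop, compared directly against the already-poisoned iftable slots in memory, and unpoisoned on both exits, inline on the match path and in the slow path's new prologue. The `subl(..., Immediate(2))` stride reflects iftable entries being (interface class, method array) pairs. A toy model of the comparison, assuming only that poisoning is an involutive transform of stored references (the x86 backends use two's-complement negation, but nothing below is ART code):

  #include <cassert>
  #include <cstdint>
  #include <vector>

  // Toy poisoning: an involution, so Unpoison is the same transform again.
  uint32_t Poison(uint32_t ref) { return static_cast<uint32_t>(0u - ref); }
  uint32_t Unpoison(uint32_t ref) { return Poison(ref); }

  // Model of the fast path: poison `cls` once, then compare it against the
  // poisoned iftable slots without unpoisoning each slot inside the loop.
  bool IftableContains(const std::vector<uint32_t>& poisoned_slots, uint32_t cls) {
    uint32_t poisoned_cls = Poison(cls);       // MaybePoisonHeapReference(cls)
    for (uint32_t slot : poisoned_slots) {
      if (slot == poisoned_cls) {
        return true;                           // match: unpoison `cls` inline
      }
    }
    return false;                              // exhausted: slow path unpoisons `cls` first
  }

  int main() {
    std::vector<uint32_t> iftable = {Poison(0x1000), Poison(0x2000)};
    assert(IftableContains(iftable, 0x2000));
    assert(!IftableContains(iftable, 0x3000));
    assert(Unpoison(Poison(0x1234)) == 0x1234u);  // involution check
    return 0;
  }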
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index 02fbf234c1..caad7885bd 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -337,7 +337,14 @@ class TypeCheckSlowPathX86_64 : public SlowPathCode {
CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
__ Bind(GetEntryLabel());
- if (!is_fatal_) {
+ if (kPoisonHeapReferences &&
+ instruction_->IsCheckCast() &&
+ instruction_->AsCheckCast()->GetTypeCheckKind() == TypeCheckKind::kInterfaceCheck) {
+ // First, unpoison the `cls` reference that was poisoned for direct memory comparison.
+ __ UnpoisonHeapReference(locations->InAt(1).AsRegister<CpuRegister>());
+ }
+
+ if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
SaveLiveRegisters(codegen, locations);
}
@@ -5759,7 +5766,7 @@ void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
}
static bool CheckCastTypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
- if (type_check_kind == TypeCheckKind::kInterfaceCheck && !kPoisonHeapReferences) {
+ if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
// We need a temporary for holding the iftable length.
return true;
}
@@ -5786,11 +5793,12 @@ void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
case TypeCheckKind::kExactCheck:
case TypeCheckKind::kAbstractClassCheck:
case TypeCheckKind::kClassHierarchyCheck:
- case TypeCheckKind::kArrayObjectCheck:
- call_kind =
- kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
- baker_read_barrier_slow_path = kUseBakerReadBarrier;
+ case TypeCheckKind::kArrayObjectCheck: {
+ bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
+ call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
+ baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
break;
+ }
case TypeCheckKind::kArrayCheck:
case TypeCheckKind::kUnresolvedCheck:
case TypeCheckKind::kInterfaceCheck:
@@ -5841,12 +5849,14 @@ void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
switch (type_check_kind) {
case TypeCheckKind::kExactCheck: {
+ ReadBarrierOption read_barrier_option =
+ CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
// /* HeapReference<Class> */ out = obj->klass_
GenerateReferenceLoadTwoRegisters(instruction,
out_loc,
obj_loc,
class_offset,
- kCompilerReadBarrierOption);
+ read_barrier_option);
if (cls.IsRegister()) {
__ cmpl(out, cls.AsRegister<CpuRegister>());
} else {
@@ -5867,12 +5877,14 @@ void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
}
case TypeCheckKind::kAbstractClassCheck: {
+ ReadBarrierOption read_barrier_option =
+ CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
// /* HeapReference<Class> */ out = obj->klass_
GenerateReferenceLoadTwoRegisters(instruction,
out_loc,
obj_loc,
class_offset,
- kCompilerReadBarrierOption);
+ read_barrier_option);
// If the class is abstract, we eagerly fetch the super class of the
// object to avoid doing a comparison we know will fail.
NearLabel loop, success;
@@ -5882,7 +5894,7 @@ void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
out_loc,
super_offset,
maybe_temp_loc,
- kCompilerReadBarrierOption);
+ read_barrier_option);
__ testl(out, out);
// If `out` is null, we use it for the result, and jump to `done`.
__ j(kEqual, &done);
@@ -5901,12 +5913,14 @@ void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
}
case TypeCheckKind::kClassHierarchyCheck: {
+ ReadBarrierOption read_barrier_option =
+ CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
// /* HeapReference<Class> */ out = obj->klass_
GenerateReferenceLoadTwoRegisters(instruction,
out_loc,
obj_loc,
class_offset,
- kCompilerReadBarrierOption);
+ read_barrier_option);
// Walk over the class hierarchy to find a match.
NearLabel loop, success;
__ Bind(&loop);
@@ -5922,7 +5936,7 @@ void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
out_loc,
super_offset,
maybe_temp_loc,
- kCompilerReadBarrierOption);
+ read_barrier_option);
__ testl(out, out);
__ j(kNotEqual, &loop);
// If `out` is null, we use it for the result, and jump to `done`.
@@ -5936,12 +5950,14 @@ void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
}
case TypeCheckKind::kArrayObjectCheck: {
+ ReadBarrierOption read_barrier_option =
+ CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
// /* HeapReference<Class> */ out = obj->klass_
GenerateReferenceLoadTwoRegisters(instruction,
out_loc,
obj_loc,
class_offset,
- kCompilerReadBarrierOption);
+ read_barrier_option);
// Do an exact check.
NearLabel exact_check;
if (cls.IsRegister()) {
@@ -5957,7 +5973,7 @@ void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
out_loc,
component_offset,
maybe_temp_loc,
- kCompilerReadBarrierOption);
+ read_barrier_option);
__ testl(out, out);
// If `out` is null, we use it for the result, and jump to `done`.
__ j(kEqual, &done);
@@ -6041,30 +6057,9 @@ void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
}
}
-static bool IsTypeCheckSlowPathFatal(TypeCheckKind type_check_kind, bool throws_into_catch) {
- switch (type_check_kind) {
- case TypeCheckKind::kExactCheck:
- case TypeCheckKind::kAbstractClassCheck:
- case TypeCheckKind::kClassHierarchyCheck:
- case TypeCheckKind::kArrayObjectCheck:
- return !throws_into_catch && !kEmitCompilerReadBarrier;
- case TypeCheckKind::kInterfaceCheck:
- return !throws_into_catch && !kEmitCompilerReadBarrier && !kPoisonHeapReferences;
- case TypeCheckKind::kArrayCheck:
- case TypeCheckKind::kUnresolvedCheck:
- return false;
- }
- LOG(FATAL) << "Unreachable";
- UNREACHABLE();
-}
-
void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
- bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
- bool is_fatal_slow_path = IsTypeCheckSlowPathFatal(type_check_kind, throws_into_catch);
- LocationSummary::CallKind call_kind = is_fatal_slow_path
- ? LocationSummary::kNoCall
- : LocationSummary::kCallOnSlowPath;
+ LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
LocationSummary* locations =
new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
locations->SetInAt(0, Location::RequiresRegister());
@@ -6105,11 +6100,7 @@ void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
const uint32_t object_array_data_offset =
mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
- // Always false for read barriers since we may need to go to the entrypoint for non-fatal cases
- // from false negatives. The false negatives may come from avoiding read barriers below. Avoiding
- // read barriers is done for performance and code size reasons.
- bool is_type_check_slow_path_fatal =
- IsTypeCheckSlowPathFatal(type_check_kind, instruction->CanThrowIntoCatchBlock());
+ bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
SlowPathCode* type_check_slow_path =
new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86_64(
instruction, is_type_check_slow_path_fatal);
@@ -6263,42 +6254,40 @@ void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
}
case TypeCheckKind::kInterfaceCheck:
- // Fast path for the interface check. We always go slow path for heap poisoning since
- // unpoisoning cls would require an extra temp.
- if (!kPoisonHeapReferences) {
- // Try to avoid read barriers to improve the fast path. We can not get false positives by
- // doing this.
- // /* HeapReference<Class> */ temp = obj->klass_
- GenerateReferenceLoadTwoRegisters(instruction,
- temp_loc,
- obj_loc,
- class_offset,
- kWithoutReadBarrier);
-
- // /* HeapReference<Class> */ temp = temp->iftable_
- GenerateReferenceLoadTwoRegisters(instruction,
- temp_loc,
- temp_loc,
- iftable_offset,
- kWithoutReadBarrier);
- // Iftable is never null.
- __ movl(maybe_temp2_loc.AsRegister<CpuRegister>(), Address(temp, array_length_offset));
- // Loop through the iftable and check if any class matches.
- NearLabel start_loop;
- __ Bind(&start_loop);
- // Need to subtract first to handle the empty array case.
- __ subl(maybe_temp2_loc.AsRegister<CpuRegister>(), Immediate(2));
- __ j(kNegative, type_check_slow_path->GetEntryLabel());
- // Go to next interface if the classes do not match.
- __ cmpl(cls.AsRegister<CpuRegister>(),
- CodeGeneratorX86_64::ArrayAddress(temp,
- maybe_temp2_loc,
- TIMES_4,
- object_array_data_offset));
- __ j(kNotEqual, &start_loop); // Return if same class.
- } else {
- __ jmp(type_check_slow_path->GetEntryLabel());
- }
+ // Fast path for the interface check. Try to avoid read barriers to improve the fast path.
+ // We can not get false positives by doing this.
+ // /* HeapReference<Class> */ temp = obj->klass_
+ GenerateReferenceLoadTwoRegisters(instruction,
+ temp_loc,
+ obj_loc,
+ class_offset,
+ kWithoutReadBarrier);
+
+ // /* HeapReference<Class> */ temp = temp->iftable_
+ GenerateReferenceLoadTwoRegisters(instruction,
+ temp_loc,
+ temp_loc,
+ iftable_offset,
+ kWithoutReadBarrier);
+ // Iftable is never null.
+ __ movl(maybe_temp2_loc.AsRegister<CpuRegister>(), Address(temp, array_length_offset));
+ // Maybe poison the `cls` for direct comparison with memory.
+ __ MaybePoisonHeapReference(cls.AsRegister<CpuRegister>());
+ // Loop through the iftable and check if any class matches.
+ NearLabel start_loop;
+ __ Bind(&start_loop);
+ // Need to subtract first to handle the empty array case.
+ __ subl(maybe_temp2_loc.AsRegister<CpuRegister>(), Immediate(2));
+ __ j(kNegative, type_check_slow_path->GetEntryLabel());
+ // Go to next interface if the classes do not match.
+ __ cmpl(cls.AsRegister<CpuRegister>(),
+ CodeGeneratorX86_64::ArrayAddress(temp,
+ maybe_temp2_loc,
+ TIMES_4,
+ object_array_data_offset));
+ __ j(kNotEqual, &start_loop); // Return if same class.
+ // If `cls` was poisoned above, unpoison it.
+ __ MaybeUnpoisonHeapReference(cls.AsRegister<CpuRegister>());
break;
}
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index d33f2f1d65..8efe5e5e91 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -6590,7 +6590,7 @@ std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs);
class HInstanceOf FINAL : public HExpression<2> {
public:
HInstanceOf(HInstruction* object,
- HLoadClass* constant,
+ HLoadClass* target_class,
TypeCheckKind check_kind,
uint32_t dex_pc)
: HExpression(DataType::Type::kBool,
@@ -6599,7 +6599,13 @@ class HInstanceOf FINAL : public HExpression<2> {
SetPackedField<TypeCheckKindField>(check_kind);
SetPackedFlag<kFlagMustDoNullCheck>(true);
SetRawInputAt(0, object);
- SetRawInputAt(1, constant);
+ SetRawInputAt(1, target_class);
+ }
+
+ HLoadClass* GetTargetClass() const {
+ HInstruction* load_class = InputAt(1);
+ DCHECK(load_class->IsLoadClass());
+ return load_class->AsLoadClass();
}
bool IsClonable() const OVERRIDE { return true; }
@@ -6693,14 +6699,20 @@ class HBoundType FINAL : public HExpression<1> {
class HCheckCast FINAL : public HTemplateInstruction<2> {
public:
HCheckCast(HInstruction* object,
- HLoadClass* constant,
+ HLoadClass* target_class,
TypeCheckKind check_kind,
uint32_t dex_pc)
: HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
SetPackedField<TypeCheckKindField>(check_kind);
SetPackedFlag<kFlagMustDoNullCheck>(true);
SetRawInputAt(0, object);
- SetRawInputAt(1, constant);
+ SetRawInputAt(1, target_class);
+ }
+
+ HLoadClass* GetTargetClass() const {
+ HInstruction* load_class = InputAt(1);
+ DCHECK(load_class->IsLoadClass());
+ return load_class->AsLoadClass();
}
bool IsClonable() const OVERRIDE { return true; }
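GetTargetClass() gives both type-check instructions a checked, typed view of input 1, which the CodeGenerator helpers above depend on (GetTargetClass()->IsInBootImage()). A short consumer sketch; the function name is hypothetical:

  // Hypothetical consumer: the typed accessor replaces raw InputAt(1) plus a
  // manual cast at every call site, and its DCHECK catches malformed graphs
  // in debug builds.
  bool CanSkipReadBarrierFor(HInstanceOf* instance_of) {
    HLoadClass* target = instance_of->GetTargetClass();
    // Boot-image classes are non-moving, hence barrier-insensitive to compare.
    return target->IsInBootImage();
  }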