Revert "Revert "Optimize code generation of check-cast and instance-of.""
This reverts commit 7537437c6a2f89249a48e30effcc27d4e7c5a04f.
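
Instead of always comparing the object's class against the target and
calling into the runtime on mismatch, the graph builder now derives a
TypeCheckKind from the resolved class (exact, abstract class, class
hierarchy, array object, array, interface), and the ARM, ARM64, x86 and
x86-64 code generators emit a specialized fast path for each kind. Only
interface checks still unconditionally call the runtime. Check-cast slow
paths that cannot return (the exception cannot be caught in the same
method) are marked fatal and skip saving/restoring live registers.
MIPS64 keeps the generic slow-path scheme and only special-cases the
exact check.

As a rough sketch (hypothetical C++, not the emitted assembly), the
fast path generated for an instance-of classified as
kClassHierarchyCheck walks the superclass chain:

  bool InstanceOfClassHierarchy(mirror::Object* obj, mirror::Class* cls) {
    // The `out` register doubles as walk cursor and result.
    for (mirror::Class* k = obj->GetClass(); k != nullptr; k = k->GetSuperClass()) {
      if (k == cls) return true;  // Found `cls` on the superclass chain.
    }
    return false;  // Fell off the chain; the null cursor is the 0 result.
  }
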
Change-Id: If759cb08646e47b62829bebc3c5b1e2f2969cf84
diff --git a/compiler/optimizing/builder.cc b/compiler/optimizing/builder.cc
index 3012346..274a2a6 100644
--- a/compiler/optimizing/builder.cc
+++ b/compiler/optimizing/builder.cc
@@ -1614,25 +1614,48 @@
}
}
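+// Pick the cheapest check the code generators know how to emit for a type
+// check against `cls`: final classes (and array classes that cannot be
+// assigned from other types) need only an exact class compare, while
+// interface checks always go through the runtime.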
+static TypeCheckKind ComputeTypeCheckKind(Handle<mirror::Class> cls)
+ SHARED_REQUIRES(Locks::mutator_lock_) {
+ if (cls->IsInterface()) {
+ return TypeCheckKind::kInterfaceCheck;
+ } else if (cls->IsArrayClass()) {
+ if (cls->GetComponentType()->IsObjectClass()) {
+ return TypeCheckKind::kArrayObjectCheck;
+ } else if (cls->CannotBeAssignedFromOtherTypes()) {
+ return TypeCheckKind::kExactCheck;
+ } else {
+ return TypeCheckKind::kArrayCheck;
+ }
+ } else if (cls->IsFinal()) {
+ return TypeCheckKind::kExactCheck;
+ } else if (cls->IsAbstract()) {
+ return TypeCheckKind::kAbstractClassCheck;
+ } else {
+ return TypeCheckKind::kClassHierarchyCheck;
+ }
+}
+
bool HGraphBuilder::BuildTypeCheck(const Instruction& instruction,
uint8_t destination,
uint8_t reference,
uint16_t type_index,
uint32_t dex_pc) {
- bool type_known_final;
- bool type_known_abstract;
- // `CanAccessTypeWithoutChecks` will tell whether the method being
- // built is trying to access its own class, so that the generated
- // code can optimize for this case. However, the optimization does not
- // work for inlining, so we use `IsOutermostCompilingClass` instead.
- bool dont_use_is_referrers_class;
- bool can_access = compiler_driver_->CanAccessTypeWithoutChecks(
- dex_compilation_unit_->GetDexMethodIndex(), *dex_file_, type_index,
- &type_known_final, &type_known_abstract, &dont_use_is_referrers_class);
- if (!can_access) {
+ ScopedObjectAccess soa(Thread::Current());
+ StackHandleScope<2> hs(soa.Self());
+ Handle<mirror::DexCache> dex_cache(hs.NewHandle(
+ dex_compilation_unit_->GetClassLinker()->FindDexCache(
+ soa.Self(), *dex_compilation_unit_->GetDexFile())));
+ Handle<mirror::Class> resolved_class(hs.NewHandle(dex_cache->GetResolvedType(type_index)));
+
+ if ((resolved_class.Get() == nullptr) ||
+ // TODO: Remove this check once the compiler actually knows which
+ // ArtMethod it is compiling.
+ (GetCompilingClass() == nullptr) ||
+ !GetCompilingClass()->CanAccess(resolved_class.Get())) {
MaybeRecordStat(MethodCompilationStat::kNotCompiledCantAccesType);
return false;
}
+
HInstruction* object = LoadLocal(reference, Primitive::kPrimNot, dex_pc);
HLoadClass* cls = new (arena_) HLoadClass(
graph_->GetCurrentMethod(),
@@ -1641,17 +1664,18 @@
IsOutermostCompilingClass(type_index),
dex_pc);
current_block_->AddInstruction(cls);
+
// The class needs a temporary before being used by the type check.
Temporaries temps(graph_);
temps.Add(cls);
+
+ TypeCheckKind check_kind = ComputeTypeCheckKind(resolved_class);
if (instruction.Opcode() == Instruction::INSTANCE_OF) {
- current_block_->AddInstruction(
- new (arena_) HInstanceOf(object, cls, type_known_final, dex_pc));
+ current_block_->AddInstruction(new (arena_) HInstanceOf(object, cls, check_kind, dex_pc));
UpdateLocal(destination, current_block_->GetLastInstruction(), dex_pc);
} else {
DCHECK_EQ(instruction.Opcode(), Instruction::CHECK_CAST);
- current_block_->AddInstruction(
- new (arena_) HCheckCast(object, cls, type_known_final, dex_pc));
+ current_block_->AddInstruction(new (arena_) HCheckCast(object, cls, check_kind, dex_pc));
}
return true;
}
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc
index 55c9214..d431acf 100644
--- a/compiler/optimizing/code_generator_arm.cc
+++ b/compiler/optimizing/code_generator_arm.cc
@@ -271,7 +271,8 @@
class TypeCheckSlowPathARM : public SlowPathCode {
public:
- explicit TypeCheckSlowPathARM(HInstruction* instruction) : instruction_(instruction) {}
+ TypeCheckSlowPathARM(HInstruction* instruction, bool is_fatal)
+ : instruction_(instruction), is_fatal_(is_fatal) {}
void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
LocationSummary* locations = instruction_->GetLocations();
@@ -282,7 +283,19 @@
CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
__ Bind(GetEntryLabel());
- SaveLiveRegisters(codegen, locations);
+
+ if (instruction_->IsCheckCast()) {
+ // The codegen for the instruction overwrites `temp`, so put it back in place.
+ Register obj = locations->InAt(0).AsRegister<Register>();
+ Register temp = locations->GetTemp(0).AsRegister<Register>();
+ uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
+ __ LoadFromOffset(kLoadWord, temp, obj, class_offset);
+ __ MaybeUnpoisonHeapReference(temp);
+ }
+
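+ // A fatal slow path always throws and never returns to compiled code,
+ // so there is no need to preserve the live registers.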
+ if (!is_fatal_) {
+ SaveLiveRegisters(codegen, locations);
+ }
// We're moving two locations to locations that could overlap, so we need a parallel
// move resolver.
@@ -309,14 +322,19 @@
this);
}
- RestoreLiveRegisters(codegen, locations);
- __ b(GetExitLabel());
+ if (!is_fatal_) {
+ RestoreLiveRegisters(codegen, locations);
+ __ b(GetExitLabel());
+ }
}
const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM"; }
+ bool IsFatal() const OVERRIDE { return is_fatal_; }
+
private:
HInstruction* const instruction_;
+ const bool is_fatal_;
DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM);
};
@@ -4357,15 +4375,34 @@
}
void LocationsBuilderARM::VisitInstanceOf(HInstanceOf* instruction) {
- LocationSummary::CallKind call_kind = instruction->IsClassFinal()
- ? LocationSummary::kNoCall
- : LocationSummary::kCallOnSlowPath;
+ LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
+ switch (instruction->GetTypeCheckKind()) {
+ case TypeCheckKind::kExactCheck:
+ case TypeCheckKind::kAbstractClassCheck:
+ case TypeCheckKind::kClassHierarchyCheck:
+ case TypeCheckKind::kArrayObjectCheck:
+ call_kind = LocationSummary::kNoCall;
+ break;
+ case TypeCheckKind::kInterfaceCheck:
+ call_kind = LocationSummary::kCall;
+ break;
+ case TypeCheckKind::kArrayCheck:
+ call_kind = LocationSummary::kCallOnSlowPath;
+ break;
+ }
LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
- locations->SetInAt(0, Location::RequiresRegister());
- locations->SetInAt(1, Location::RequiresRegister());
- // The out register is used as a temporary, so it overlaps with the inputs.
- // Note that TypeCheckSlowPathARM uses this register too.
- locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
+ if (call_kind != LocationSummary::kCall) {
+ locations->SetInAt(0, Location::RequiresRegister());
+ locations->SetInAt(1, Location::RequiresRegister());
+ // The out register is used as a temporary, so it overlaps with the inputs.
+ // Note that TypeCheckSlowPathARM uses this register too.
+ locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
+ } else {
+ InvokeRuntimeCallingConvention calling_convention;
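+ // Note: the entrypoint takes the checked class as its first argument
+ // and the object's class as its second, hence the swapped positions.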
+ locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
+ locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
+ locations->SetOut(Location::RegisterLocation(R0));
+ }
}
void InstructionCodeGeneratorARM::VisitInstanceOf(HInstanceOf* instruction) {
@@ -4374,6 +4411,9 @@
Register cls = locations->InAt(1).AsRegister<Register>();
Register out = locations->Out().AsRegister<Register>();
uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
+ uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
+ uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
+ uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Label done, zero;
SlowPathCode* slow_path = nullptr;
@@ -4382,67 +4422,242 @@
if (instruction->MustDoNullCheck()) {
__ CompareAndBranchIfZero(obj, &zero);
}
- // Compare the class of `obj` with `cls`.
- __ LoadFromOffset(kLoadWord, out, obj, class_offset);
- __ MaybeUnpoisonHeapReference(out);
- __ cmp(out, ShifterOperand(cls));
- if (instruction->IsClassFinal()) {
- // Classes must be equal for the instanceof to succeed.
- __ b(&zero, NE);
- __ LoadImmediate(out, 1);
- __ b(&done);
- } else {
- // If the classes are not equal, we go into a slow path.
- DCHECK(locations->OnlyCallsOnSlowPath());
- slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(instruction);
- codegen_->AddSlowPath(slow_path);
- __ b(slow_path->GetEntryLabel(), NE);
- __ LoadImmediate(out, 1);
- __ b(&done);
+
+ // In case of an interface check, we put the object class into the object register.
+ // This is safe, as the register is caller-save, and the object must be in another
+ // register if it survives the runtime call.
+ Register target = (instruction->GetTypeCheckKind() == TypeCheckKind::kInterfaceCheck)
+ ? obj
+ : out;
+ __ LoadFromOffset(kLoadWord, target, obj, class_offset);
+ __ MaybeUnpoisonHeapReference(target);
+
+ switch (instruction->GetTypeCheckKind()) {
+ case TypeCheckKind::kExactCheck: {
+ __ cmp(out, ShifterOperand(cls));
+ // Classes must be equal for the instanceof to succeed.
+ __ b(&zero, NE);
+ __ LoadImmediate(out, 1);
+ __ b(&done);
+ break;
+ }
+ case TypeCheckKind::kAbstractClassCheck: {
+ // If the class is abstract, we eagerly fetch the super class of the
+ // object to avoid doing a comparison we know will fail.
+ Label loop;
+ __ Bind(&loop);
+ __ LoadFromOffset(kLoadWord, out, out, super_offset);
+ __ MaybeUnpoisonHeapReference(out);
+ // If `out` is null, we use it for the result, and jump to `done`.
+ __ CompareAndBranchIfZero(out, &done);
+ __ cmp(out, ShifterOperand(cls));
+ __ b(&loop, NE);
+ __ LoadImmediate(out, 1);
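+ // `zero` is only bound below if some path branched to it; otherwise
+ // `done` follows immediately and the branch would be redundant.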
+ if (zero.IsLinked()) {
+ __ b(&done);
+ }
+ break;
+ }
+ case TypeCheckKind::kClassHierarchyCheck: {
+ // Walk over the class hierarchy to find a match.
+ Label loop, success;
+ __ Bind(&loop);
+ __ cmp(out, ShifterOperand(cls));
+ __ b(&success, EQ);
+ __ LoadFromOffset(kLoadWord, out, out, super_offset);
+ __ MaybeUnpoisonHeapReference(out);
+ __ CompareAndBranchIfNonZero(out, &loop);
+ // If `out` is null, we use it for the result, and jump to `done`.
+ __ b(&done);
+ __ Bind(&success);
+ __ LoadImmediate(out, 1);
+ if (zero.IsLinked()) {
+ __ b(&done);
+ }
+ break;
+ }
+ case TypeCheckKind::kArrayObjectCheck: {
+ // Just need to check that the object's class is a non-primitive array.
+ __ LoadFromOffset(kLoadWord, out, out, component_offset);
+ __ MaybeUnpoisonHeapReference(out);
+ // If `out` is null, we use it for the result, and jump to `done`.
+ __ CompareAndBranchIfZero(out, &done);
+ __ LoadFromOffset(kLoadUnsignedHalfword, out, out, primitive_offset);
+ static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
+ __ CompareAndBranchIfNonZero(out, &zero);
+ __ LoadImmediate(out, 1);
+ __ b(&done);
+ break;
+ }
+ case TypeCheckKind::kArrayCheck: {
+ __ cmp(out, ShifterOperand(cls));
+ DCHECK(locations->OnlyCallsOnSlowPath());
+ slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
+ instruction, /* is_fatal */ false);
+ codegen_->AddSlowPath(slow_path);
+ __ b(slow_path->GetEntryLabel(), NE);
+ __ LoadImmediate(out, 1);
+ if (zero.IsLinked()) {
+ __ b(&done);
+ }
+ break;
+ }
+
+ case TypeCheckKind::kInterfaceCheck:
+ default: {
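+ // `obj` already holds the object's class (loaded above) and sits in
+ // the second argument register, as required by the runtime call.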
+ codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial),
+ instruction,
+ instruction->GetDexPc(),
+ nullptr);
+ if (zero.IsLinked()) {
+ __ b(&done);
+ }
+ break;
+ }
}
- if (instruction->MustDoNullCheck() || instruction->IsClassFinal()) {
+ if (zero.IsLinked()) {
__ Bind(&zero);
__ LoadImmediate(out, 0);
}
+ if (done.IsLinked()) {
+ __ Bind(&done);
+ }
+
if (slow_path != nullptr) {
__ Bind(slow_path->GetExitLabel());
}
- __ Bind(&done);
}
void LocationsBuilderARM::VisitCheckCast(HCheckCast* instruction) {
+ LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
+ bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
+
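+ // When the ClassCastException cannot be caught in this method, the
+ // slow path never returns, so the simple checks need not be treated
+ // as calls by the register allocator.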
+ switch (instruction->GetTypeCheckKind()) {
+ case TypeCheckKind::kExactCheck:
+ case TypeCheckKind::kAbstractClassCheck:
+ case TypeCheckKind::kClassHierarchyCheck:
+ case TypeCheckKind::kArrayObjectCheck:
+ call_kind = throws_into_catch
+ ? LocationSummary::kCallOnSlowPath
+ : LocationSummary::kNoCall;
+ break;
+ case TypeCheckKind::kInterfaceCheck:
+ call_kind = LocationSummary::kCall;
+ break;
+ case TypeCheckKind::kArrayCheck:
+ call_kind = LocationSummary::kCallOnSlowPath;
+ break;
+ }
+
LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
- instruction, LocationSummary::kCallOnSlowPath);
- locations->SetInAt(0, Location::RequiresRegister());
- locations->SetInAt(1, Location::RequiresRegister());
- // Note that TypeCheckSlowPathARM uses this register too.
- locations->AddTemp(Location::RequiresRegister());
+ instruction, call_kind);
+ if (call_kind != LocationSummary::kCall) {
+ locations->SetInAt(0, Location::RequiresRegister());
+ locations->SetInAt(1, Location::RequiresRegister());
+ // Note that TypeCheckSlowPathARM uses this register too.
+ locations->AddTemp(Location::RequiresRegister());
+ } else {
+ InvokeRuntimeCallingConvention calling_convention;
+ locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
+ locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
+ }
}
void InstructionCodeGeneratorARM::VisitCheckCast(HCheckCast* instruction) {
LocationSummary* locations = instruction->GetLocations();
Register obj = locations->InAt(0).AsRegister<Register>();
Register cls = locations->InAt(1).AsRegister<Register>();
- Register temp = locations->GetTemp(0).AsRegister<Register>();
+ Register temp = locations->WillCall()
+ ? Register(kNoRegister)
+ : locations->GetTemp(0).AsRegister<Register>();
+
uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
+ uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
+ uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
+ uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
+ SlowPathCode* slow_path = nullptr;
- SlowPathCode* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(instruction);
- codegen_->AddSlowPath(slow_path);
-
- // avoid null check if we know obj is not null.
- if (instruction->MustDoNullCheck()) {
- __ CompareAndBranchIfZero(obj, slow_path->GetExitLabel());
+ if (!locations->WillCall()) {
+ slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
+ instruction, !locations->CanCall());
+ codegen_->AddSlowPath(slow_path);
}
- // Compare the class of `obj` with `cls`.
- __ LoadFromOffset(kLoadWord, temp, obj, class_offset);
- __ MaybeUnpoisonHeapReference(temp);
- __ cmp(temp, ShifterOperand(cls));
- // The checkcast succeeds if the classes are equal (fast path).
- // Otherwise, we need to go into the slow path to check the types.
- __ b(slow_path->GetEntryLabel(), NE);
- __ Bind(slow_path->GetExitLabel());
+
+ Label done;
+ // Avoid null check if we know obj is not null.
+ if (instruction->MustDoNullCheck()) {
+ __ CompareAndBranchIfZero(obj, &done);
+ }
+
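+ // The runtime check-cast entrypoint compares two classes, so for the
+ // call we replace the object with its class in the argument register.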
+ if (locations->WillCall()) {
+ __ LoadFromOffset(kLoadWord, obj, obj, class_offset);
+ __ MaybeUnpoisonHeapReference(obj);
+ } else {
+ __ LoadFromOffset(kLoadWord, temp, obj, class_offset);
+ __ MaybeUnpoisonHeapReference(temp);
+ }
+
+ switch (instruction->GetTypeCheckKind()) {
+ case TypeCheckKind::kExactCheck:
+ case TypeCheckKind::kArrayCheck: {
+ __ cmp(temp, ShifterOperand(cls));
+ // Jump to slow path for throwing the exception or doing a
+ // more involved array check.
+ __ b(slow_path->GetEntryLabel(), NE);
+ break;
+ }
+ case TypeCheckKind::kAbstractClassCheck: {
+ // If the class is abstract, we eagerly fetch the super class of the
+ // object to avoid doing a comparison we know will fail.
+ Label loop;
+ __ Bind(&loop);
+ __ LoadFromOffset(kLoadWord, temp, temp, super_offset);
+ __ MaybeUnpoisonHeapReference(temp);
+ // Jump to the slow path to throw the exception.
+ __ CompareAndBranchIfZero(temp, slow_path->GetEntryLabel());
+ __ cmp(temp, ShifterOperand(cls));
+ __ b(&loop, NE);
+ break;
+ }
+ case TypeCheckKind::kClassHierarchyCheck: {
+ // Walk over the class hierarchy to find a match.
+ Label loop, success;
+ __ Bind(&loop);
+ __ cmp(temp, ShifterOperand(cls));
+ __ b(&success, EQ);
+ __ LoadFromOffset(kLoadWord, temp, temp, super_offset);
+ __ MaybeUnpoisonHeapReference(temp);
+ __ CompareAndBranchIfNonZero(temp, &loop);
+ // Jump to the slow path to throw the exception.
+ __ b(slow_path->GetEntryLabel());
+ __ Bind(&success);
+ break;
+ }
+ case TypeCheckKind::kArrayObjectCheck: {
+ // Just need to check that the object's class is a non-primitive array.
+ __ LoadFromOffset(kLoadWord, temp, temp, component_offset);
+ __ MaybeUnpoisonHeapReference(temp);
+ __ CompareAndBranchIfZero(temp, slow_path->GetEntryLabel());
+ __ LoadFromOffset(kLoadUnsignedHalfword, temp, temp, primitive_offset);
+ static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
+ __ CompareAndBranchIfNonZero(temp, slow_path->GetEntryLabel());
+ break;
+ }
+ case TypeCheckKind::kInterfaceCheck:
+ default:
+ codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast),
+ instruction,
+ instruction->GetDexPc(),
+ nullptr);
+ break;
+ }
+ __ Bind(&done);
+
+ if (slow_path != nullptr) {
+ __ Bind(slow_path->GetExitLabel());
+ }
}
void LocationsBuilderARM::VisitMonitorOperation(HMonitorOperation* instruction) {
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 531b669..580e93e 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -409,7 +409,8 @@
class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
public:
- explicit TypeCheckSlowPathARM64(HInstruction* instruction) : instruction_(instruction) {}
+ TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
+ : instruction_(instruction), is_fatal_(is_fatal) {}
void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
LocationSummary* locations = instruction_->GetLocations();
@@ -422,7 +423,19 @@
uint32_t dex_pc = instruction_->GetDexPc();
__ Bind(GetEntryLabel());
- SaveLiveRegisters(codegen, locations);
+
+ if (instruction_->IsCheckCast()) {
+ // The codegen for the instruction overwrites `temp`, so put it back in place.
+ Register obj = InputRegisterAt(instruction_, 0);
+ Register temp = WRegisterFrom(locations->GetTemp(0));
+ uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
+ __ Ldr(temp, HeapOperand(obj, class_offset));
+ arm64_codegen->GetAssembler()->MaybeUnpoisonHeapReference(temp);
+ }
+
+ if (!is_fatal_) {
+ SaveLiveRegisters(codegen, locations);
+ }
// We're moving two locations to locations that could overlap, so we need a parallel
// move resolver.
@@ -445,14 +458,18 @@
CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
}
- RestoreLiveRegisters(codegen, locations);
- __ B(GetExitLabel());
+ if (!is_fatal_) {
+ RestoreLiveRegisters(codegen, locations);
+ __ B(GetExitLabel());
+ }
}
const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
+ bool IsFatal() const OVERRIDE { return is_fatal_; }
+
private:
HInstruction* const instruction_;
+ const bool is_fatal_;
DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};
@@ -1629,38 +1646,6 @@
__ B(slow_path->GetEntryLabel(), hs);
}
-void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
- LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
- instruction, LocationSummary::kCallOnSlowPath);
- locations->SetInAt(0, Location::RequiresRegister());
- locations->SetInAt(1, Location::RequiresRegister());
- // Note that TypeCheckSlowPathARM64 uses this register too.
- locations->AddTemp(Location::RequiresRegister());
-}
-
-void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
- Register obj = InputRegisterAt(instruction, 0);;
- Register cls = InputRegisterAt(instruction, 1);;
- Register obj_cls = WRegisterFrom(instruction->GetLocations()->GetTemp(0));
-
- SlowPathCodeARM64* slow_path =
- new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction);
- codegen_->AddSlowPath(slow_path);
-
- // Avoid null check if we know obj is not null.
- if (instruction->MustDoNullCheck()) {
- __ Cbz(obj, slow_path->GetExitLabel());
- }
- // Compare the class of `obj` with `cls`.
- __ Ldr(obj_cls, HeapOperand(obj, mirror::Object::ClassOffset()));
- GetAssembler()->MaybeUnpoisonHeapReference(obj_cls.W());
- __ Cmp(obj_cls, cls);
- // The checkcast succeeds if the classes are equal (fast path).
- // Otherwise, we need to go into the slow path to check the types.
- __ B(ne, slow_path->GetEntryLabel());
- __ Bind(slow_path->GetExitLabel());
-}
-
void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
LocationSummary* locations =
new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
@@ -2254,50 +2239,291 @@
}
void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
- LocationSummary::CallKind call_kind =
- instruction->IsClassFinal() ? LocationSummary::kNoCall : LocationSummary::kCallOnSlowPath;
+ LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
+ switch (instruction->GetTypeCheckKind()) {
+ case TypeCheckKind::kExactCheck:
+ case TypeCheckKind::kAbstractClassCheck:
+ case TypeCheckKind::kClassHierarchyCheck:
+ case TypeCheckKind::kArrayObjectCheck:
+ call_kind = LocationSummary::kNoCall;
+ break;
+ case TypeCheckKind::kInterfaceCheck:
+ call_kind = LocationSummary::kCall;
+ break;
+ case TypeCheckKind::kArrayCheck:
+ call_kind = LocationSummary::kCallOnSlowPath;
+ break;
+ }
LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
- locations->SetInAt(0, Location::RequiresRegister());
- locations->SetInAt(1, Location::RequiresRegister());
- // The output does overlap inputs.
- // Note that TypeCheckSlowPathARM64 uses this register too.
- locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
+ if (call_kind != LocationSummary::kCall) {
+ locations->SetInAt(0, Location::RequiresRegister());
+ locations->SetInAt(1, Location::RequiresRegister());
+ // The out register is used as a temporary, so it overlaps with the inputs.
+ // Note that TypeCheckSlowPathARM64 uses this register too.
+ locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
+ } else {
+ InvokeRuntimeCallingConvention calling_convention;
+ locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(0)));
+ locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
+ locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));
+ }
}
void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
LocationSummary* locations = instruction->GetLocations();
- Register obj = InputRegisterAt(instruction, 0);;
- Register cls = InputRegisterAt(instruction, 1);;
+ Register obj = InputRegisterAt(instruction, 0);
+ Register cls = InputRegisterAt(instruction, 1);
Register out = OutputRegister(instruction);
+ uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
+ uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
+ uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
+ uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
- vixl::Label done;
+ vixl::Label done, zero;
+ SlowPathCodeARM64* slow_path = nullptr;
// Return 0 if `obj` is null.
// Avoid null check if we know `obj` is not null.
if (instruction->MustDoNullCheck()) {
+ __ Cbz(obj, &zero);
+ }
+
+ // In case of an interface check, we put the object class into the object register.
+ // This is safe, as the register is caller-save, and the object must be in another
+ // register if it survives the runtime call.
+ Register target = (instruction->GetTypeCheckKind() == TypeCheckKind::kInterfaceCheck)
+ ? obj
+ : out;
+ __ Ldr(target, HeapOperand(obj.W(), class_offset));
+ GetAssembler()->MaybeUnpoisonHeapReference(target);
+
+ switch (instruction->GetTypeCheckKind()) {
+ case TypeCheckKind::kExactCheck: {
+ __ Cmp(out, cls);
+ __ Cset(out, eq);
+ if (zero.IsLinked()) {
+ __ B(&done);
+ }
+ break;
+ }
+ case TypeCheckKind::kAbstractClassCheck: {
+ // If the class is abstract, we eagerly fetch the super class of the
+ // object to avoid doing a comparison we know will fail.
+ vixl::Label loop;
+ __ Bind(&loop);
+ __ Ldr(out, HeapOperand(out, super_offset));
+ GetAssembler()->MaybeUnpoisonHeapReference(out);
+ // If `out` is null, we use it for the result, and jump to `done`.
+ __ Cbz(out, &done);
+ __ Cmp(out, cls);
+ __ B(ne, &loop);
+ __ Mov(out, 1);
+ if (zero.IsLinked()) {
+ __ B(&done);
+ }
+ break;
+ }
+ case TypeCheckKind::kClassHierarchyCheck: {
+ // Walk over the class hierarchy to find a match.
+ vixl::Label loop, success;
+ __ Bind(&loop);
+ __ Cmp(out, cls);
+ __ B(eq, &success);
+ __ Ldr(out, HeapOperand(out, super_offset));
+ GetAssembler()->MaybeUnpoisonHeapReference(out);
+ __ Cbnz(out, &loop);
+ // If `out` is null, we use it for the result, and jump to `done`.
+ __ B(&done);
+ __ Bind(&success);
+ __ Mov(out, 1);
+ if (zero.IsLinked()) {
+ __ B(&done);
+ }
+ break;
+ }
+ case TypeCheckKind::kArrayObjectCheck: {
+ // Just need to check that the object's class is a non-primitive array.
+ __ Ldr(out, HeapOperand(out, component_offset));
+ GetAssembler()->MaybeUnpoisonHeapReference(out);
+ // If `out` is null, we use it for the result, and jump to `done`.
+ __ Cbz(out, &done);
+ __ Ldrh(out, HeapOperand(out, primitive_offset));
+ static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
+ __ Cbnz(out, &zero);
+ __ Mov(out, 1);
+ __ B(&done);
+ break;
+ }
+ case TypeCheckKind::kArrayCheck: {
+ __ Cmp(out, cls);
+ DCHECK(locations->OnlyCallsOnSlowPath());
+ slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(
+ instruction, /* is_fatal */ false);
+ codegen_->AddSlowPath(slow_path);
+ __ B(ne, slow_path->GetEntryLabel());
+ __ Mov(out, 1);
+ if (zero.IsLinked()) {
+ __ B(&done);
+ }
+ break;
+ }
+
+ case TypeCheckKind::kInterfaceCheck:
+ default: {
+ codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial),
+ instruction,
+ instruction->GetDexPc(),
+ nullptr);
+ if (zero.IsLinked()) {
+ __ B(&done);
+ }
+ break;
+ }
+ }
+
+ if (zero.IsLinked()) {
+ __ Bind(&zero);
__ Mov(out, 0);
+ }
+
+ if (done.IsLinked()) {
+ __ Bind(&done);
+ }
+
+ if (slow_path != nullptr) {
+ __ Bind(slow_path->GetExitLabel());
+ }
+}
+
+void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
+ LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
+ bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
+
+ switch (instruction->GetTypeCheckKind()) {
+ case TypeCheckKind::kExactCheck:
+ case TypeCheckKind::kAbstractClassCheck:
+ case TypeCheckKind::kClassHierarchyCheck:
+ case TypeCheckKind::kArrayObjectCheck:
+ call_kind = throws_into_catch
+ ? LocationSummary::kCallOnSlowPath
+ : LocationSummary::kNoCall;
+ break;
+ case TypeCheckKind::kInterfaceCheck:
+ call_kind = LocationSummary::kCall;
+ break;
+ case TypeCheckKind::kArrayCheck:
+ call_kind = LocationSummary::kCallOnSlowPath;
+ break;
+ }
+
+ LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
+ instruction, call_kind);
+ if (call_kind != LocationSummary::kCall) {
+ locations->SetInAt(0, Location::RequiresRegister());
+ locations->SetInAt(1, Location::RequiresRegister());
+ // Note that TypeCheckSlowPathARM64 uses this register too.
+ locations->AddTemp(Location::RequiresRegister());
+ } else {
+ InvokeRuntimeCallingConvention calling_convention;
+ locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(0)));
+ locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
+ }
+}
+
+void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
+ LocationSummary* locations = instruction->GetLocations();
+ Register obj = InputRegisterAt(instruction, 0);
+ Register cls = InputRegisterAt(instruction, 1);
+ Register temp;
+ if (!locations->WillCall()) {
+ temp = WRegisterFrom(instruction->GetLocations()->GetTemp(0));
+ }
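+ // Otherwise `temp` stays invalid: the runtime call path loads the
+ // class directly into the object register and needs no temporary.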
+
+ uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
+ uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
+ uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
+ uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
+ SlowPathCodeARM64* slow_path = nullptr;
+
+ if (!locations->WillCall()) {
+ slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(
+ instruction, !locations->CanCall());
+ codegen_->AddSlowPath(slow_path);
+ }
+
+ vixl::Label done;
+ // Avoid null check if we know obj is not null.
+ if (instruction->MustDoNullCheck()) {
__ Cbz(obj, &done);
}
- // Compare the class of `obj` with `cls`.
- __ Ldr(out, HeapOperand(obj, mirror::Object::ClassOffset()));
- GetAssembler()->MaybeUnpoisonHeapReference(out.W());
- __ Cmp(out, cls);
- if (instruction->IsClassFinal()) {
- // Classes must be equal for the instanceof to succeed.
- __ Cset(out, eq);
+ if (locations->WillCall()) {
+ __ Ldr(obj, HeapOperand(obj, class_offset));
+ GetAssembler()->MaybeUnpoisonHeapReference(obj);
} else {
- // If the classes are not equal, we go into a slow path.
- DCHECK(locations->OnlyCallsOnSlowPath());
- SlowPathCodeARM64* slow_path =
- new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction);
- codegen_->AddSlowPath(slow_path);
- __ B(ne, slow_path->GetEntryLabel());
- __ Mov(out, 1);
- __ Bind(slow_path->GetExitLabel());
+ __ Ldr(temp, HeapOperand(obj, class_offset));
+ GetAssembler()->MaybeUnpoisonHeapReference(temp);
}
+ switch (instruction->GetTypeCheckKind()) {
+ case TypeCheckKind::kExactCheck:
+ case TypeCheckKind::kArrayCheck: {
+ __ Cmp(temp, cls);
+ // Jump to slow path for throwing the exception or doing a
+ // more involved array check.
+ __ B(ne, slow_path->GetEntryLabel());
+ break;
+ }
+ case TypeCheckKind::kAbstractClassCheck: {
+ // If the class is abstract, we eagerly fetch the super class of the
+ // object to avoid doing a comparison we know will fail.
+ vixl::Label loop;
+ __ Bind(&loop);
+ __ Ldr(temp, HeapOperand(temp, super_offset));
+ GetAssembler()->MaybeUnpoisonHeapReference(temp);
+ // Jump to the slow path to throw the exception.
+ __ Cbz(temp, slow_path->GetEntryLabel());
+ __ Cmp(temp, cls);
+ __ B(ne, &loop);
+ break;
+ }
+ case TypeCheckKind::kClassHierarchyCheck: {
+ // Walk over the class hierarchy to find a match.
+ vixl::Label loop, success;
+ __ Bind(&loop);
+ __ Cmp(temp, cls);
+ __ B(eq, &success);
+ __ Ldr(temp, HeapOperand(temp, super_offset));
+ GetAssembler()->MaybeUnpoisonHeapReference(temp);
+ __ Cbnz(temp, &loop);
+ // Jump to the slow path to throw the exception.
+ __ B(slow_path->GetEntryLabel());
+ __ Bind(&success);
+ break;
+ }
+ case TypeCheckKind::kArrayObjectCheck: {
+ // Just need to check that the object's class is a non-primitive array.
+ __ Ldr(temp, HeapOperand(temp, component_offset));
+ GetAssembler()->MaybeUnpoisonHeapReference(temp);
+ __ Cbz(temp, slow_path->GetEntryLabel());
+ __ Ldrh(temp, HeapOperand(temp, primitive_offset));
+ static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
+ __ Cbnz(temp, slow_path->GetEntryLabel());
+ break;
+ }
+ case TypeCheckKind::kInterfaceCheck:
+ default:
+ codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast),
+ instruction,
+ instruction->GetDexPc(),
+ nullptr);
+ break;
+ }
__ Bind(&done);
+
+ if (slow_path != nullptr) {
+ __ Bind(slow_path->GetExitLabel());
+ }
}
void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
diff --git a/compiler/optimizing/code_generator_mips64.cc b/compiler/optimizing/code_generator_mips64.cc
index bf0d2e2..4722e42 100644
--- a/compiler/optimizing/code_generator_mips64.cc
+++ b/compiler/optimizing/code_generator_mips64.cc
@@ -2281,7 +2281,7 @@
void LocationsBuilderMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
LocationSummary::CallKind call_kind =
- instruction->IsClassFinal() ? LocationSummary::kNoCall : LocationSummary::kCallOnSlowPath;
+ instruction->IsExactCheck() ? LocationSummary::kNoCall : LocationSummary::kCallOnSlowPath;
LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
locations->SetInAt(0, Location::RequiresRegister());
locations->SetInAt(1, Location::RequiresRegister());
@@ -2305,7 +2305,7 @@
// Compare the class of `obj` with `cls`.
__ LoadFromOffset(kLoadUnsignedWord, out, obj, mirror::Object::ClassOffset().Int32Value());
- if (instruction->IsClassFinal()) {
+ if (instruction->IsExactCheck()) {
// Classes must be equal for the instanceof to succeed.
__ Xor(out, out, cls);
__ Sltiu(out, out, 1);
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 09e939d..3d03dd8 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -287,7 +287,8 @@
class TypeCheckSlowPathX86 : public SlowPathCode {
public:
- explicit TypeCheckSlowPathX86(HInstruction* instruction) : instruction_(instruction) {}
+ TypeCheckSlowPathX86(HInstruction* instruction, bool is_fatal)
+ : instruction_(instruction), is_fatal_(is_fatal) {}
void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
LocationSummary* locations = instruction_->GetLocations();
@@ -298,7 +299,19 @@
CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
__ Bind(GetEntryLabel());
- SaveLiveRegisters(codegen, locations);
+
+ if (instruction_->IsCheckCast()) {
+ // The codegen for the instruction overwrites `temp`, so put it back in place.
+ Register obj = locations->InAt(0).AsRegister<Register>();
+ Register temp = locations->GetTemp(0).AsRegister<Register>();
+ uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
+ __ movl(temp, Address(obj, class_offset));
+ __ MaybeUnpoisonHeapReference(temp);
+ }
+
+ if (!is_fatal_) {
+ SaveLiveRegisters(codegen, locations);
+ }
// We're moving two locations to locations that could overlap, so we need a parallel
// move resolver.
@@ -324,18 +337,22 @@
this);
}
- if (instruction_->IsInstanceOf()) {
- x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
- }
- RestoreLiveRegisters(codegen, locations);
+ if (!is_fatal_) {
+ if (instruction_->IsInstanceOf()) {
+ x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
+ }
+ RestoreLiveRegisters(codegen, locations);
- __ jmp(GetExitLabel());
+ __ jmp(GetExitLabel());
+ }
}
const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86"; }
+ bool IsFatal() const OVERRIDE { return is_fatal_; }
private:
HInstruction* const instruction_;
+ const bool is_fatal_;
DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86);
};
@@ -4956,14 +4973,33 @@
}
void LocationsBuilderX86::VisitInstanceOf(HInstanceOf* instruction) {
- LocationSummary::CallKind call_kind = instruction->IsClassFinal()
- ? LocationSummary::kNoCall
- : LocationSummary::kCallOnSlowPath;
+ LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
+ switch (instruction->GetTypeCheckKind()) {
+ case TypeCheckKind::kExactCheck:
+ case TypeCheckKind::kAbstractClassCheck:
+ case TypeCheckKind::kClassHierarchyCheck:
+ case TypeCheckKind::kArrayObjectCheck:
+ call_kind = LocationSummary::kNoCall;
+ break;
+ case TypeCheckKind::kInterfaceCheck:
+ call_kind = LocationSummary::kCall;
+ break;
+ case TypeCheckKind::kArrayCheck:
+ call_kind = LocationSummary::kCallOnSlowPath;
+ break;
+ }
LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
- locations->SetInAt(0, Location::RequiresRegister());
- locations->SetInAt(1, Location::Any());
- // Note that TypeCheckSlowPathX86 uses this register too.
- locations->SetOut(Location::RequiresRegister());
+ if (call_kind != LocationSummary::kCall) {
+ locations->SetInAt(0, Location::RequiresRegister());
+ locations->SetInAt(1, Location::Any());
+ // Note that TypeCheckSlowPathX86 uses this register too.
+ locations->SetOut(Location::RequiresRegister());
+ } else {
+ InvokeRuntimeCallingConvention calling_convention;
+ locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
+ locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
+ locations->SetOut(Location::RegisterLocation(EAX));
+ }
}
void InstructionCodeGeneratorX86::VisitInstanceOf(HInstanceOf* instruction) {
@@ -4972,8 +5008,11 @@
Location cls = locations->InAt(1);
Register out = locations->Out().AsRegister<Register>();
uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
- NearLabel done, zero;
+ uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
+ uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
+ uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
SlowPathCode* slow_path = nullptr;
+ NearLabel done, zero;
// Return 0 if `obj` is null.
// Avoid null check if we know obj is not null.
@@ -4981,78 +5020,282 @@
__ testl(obj, obj);
__ j(kEqual, &zero);
}
- // Compare the class of `obj` with `cls`.
- __ movl(out, Address(obj, class_offset));
- __ MaybeUnpoisonHeapReference(out);
- if (cls.IsRegister()) {
- __ cmpl(out, cls.AsRegister<Register>());
- } else {
- DCHECK(cls.IsStackSlot()) << cls;
- __ cmpl(out, Address(ESP, cls.GetStackIndex()));
+
+ // In case of an interface check, we put the object class into the object register.
+ // This is safe, as the register is caller-save, and the object must be in another
+ // register if it survives the runtime call.
+ Register target = (instruction->GetTypeCheckKind() == TypeCheckKind::kInterfaceCheck)
+ ? obj
+ : out;
+ __ movl(target, Address(obj, class_offset));
+ __ MaybeUnpoisonHeapReference(target);
+
+ switch (instruction->GetTypeCheckKind()) {
+ case TypeCheckKind::kExactCheck: {
+ if (cls.IsRegister()) {
+ __ cmpl(out, cls.AsRegister<Register>());
+ } else {
+ DCHECK(cls.IsStackSlot()) << cls;
+ __ cmpl(out, Address(ESP, cls.GetStackIndex()));
+ }
+ // Classes must be equal for the instanceof to succeed.
+ __ j(kNotEqual, &zero);
+ __ movl(out, Immediate(1));
+ __ jmp(&done);
+ break;
+ }
+ case TypeCheckKind::kAbstractClassCheck: {
+ // If the class is abstract, we eagerly fetch the super class of the
+ // object to avoid doing a comparison we know will fail.
+ NearLabel loop;
+ __ Bind(&loop);
+ __ movl(out, Address(out, super_offset));
+ __ MaybeUnpoisonHeapReference(out);
+ __ testl(out, out);
+ // If `out` is null, we use it for the result, and jump to `done`.
+ __ j(kEqual, &done);
+ if (cls.IsRegister()) {
+ __ cmpl(out, cls.AsRegister<Register>());
+ } else {
+ DCHECK(cls.IsStackSlot()) << cls;
+ __ cmpl(out, Address(ESP, cls.GetStackIndex()));
+ }
+ __ j(kNotEqual, &loop);
+ __ movl(out, Immediate(1));
+ if (zero.IsLinked()) {
+ __ jmp(&done);
+ }
+ break;
+ }
+ case TypeCheckKind::kClassHierarchyCheck: {
+ // Walk over the class hierarchy to find a match.
+ NearLabel loop, success;
+ __ Bind(&loop);
+ if (cls.IsRegister()) {
+ __ cmpl(out, cls.AsRegister<Register>());
+ } else {
+ DCHECK(cls.IsStackSlot()) << cls;
+ __ cmpl(out, Address(ESP, cls.GetStackIndex()));
+ }
+ __ j(kEqual, &success);
+ __ movl(out, Address(out, super_offset));
+ __ MaybeUnpoisonHeapReference(out);
+ __ testl(out, out);
+ __ j(kNotEqual, &loop);
+ // If `out` is null, we use it for the result, and jump to `done`.
+ __ jmp(&done);
+ __ Bind(&success);
+ __ movl(out, Immediate(1));
+ if (zero.IsLinked()) {
+ __ jmp(&done);
+ }
+ break;
+ }
+ case TypeCheckKind::kArrayObjectCheck: {
+ // Just need to check that the object's class is a non-primitive array.
+ __ movl(out, Address(out, component_offset));
+ __ MaybeUnpoisonHeapReference(out);
+ __ testl(out, out);
+ // If `out` is null, we use it for the result, and jump to `done`.
+ __ j(kEqual, &done);
+ __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
+ __ j(kNotEqual, &zero);
+ __ movl(out, Immediate(1));
+ __ jmp(&done);
+ break;
+ }
+ case TypeCheckKind::kArrayCheck: {
+ if (cls.IsRegister()) {
+ __ cmpl(out, cls.AsRegister<Register>());
+ } else {
+ DCHECK(cls.IsStackSlot()) << cls;
+ __ cmpl(out, Address(ESP, cls.GetStackIndex()));
+ }
+ DCHECK(locations->OnlyCallsOnSlowPath());
+ slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86(
+ instruction, /* is_fatal */ false);
+ codegen_->AddSlowPath(slow_path);
+ __ j(kNotEqual, slow_path->GetEntryLabel());
+ __ movl(out, Immediate(1));
+ if (zero.IsLinked()) {
+ __ jmp(&done);
+ }
+ break;
+ }
+
+ case TypeCheckKind::kInterfaceCheck:
+ default: {
+ codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial),
+ instruction,
+ instruction->GetDexPc(),
+ nullptr);
+ if (zero.IsLinked()) {
+ __ jmp(&done);
+ }
+ break;
+ }
}
- if (instruction->IsClassFinal()) {
- // Classes must be equal for the instanceof to succeed.
- __ j(kNotEqual, &zero);
- __ movl(out, Immediate(1));
- __ jmp(&done);
- } else {
- // If the classes are not equal, we go into a slow path.
- DCHECK(locations->OnlyCallsOnSlowPath());
- slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86(instruction);
- codegen_->AddSlowPath(slow_path);
- __ j(kNotEqual, slow_path->GetEntryLabel());
- __ movl(out, Immediate(1));
- __ jmp(&done);
- }
-
- if (instruction->MustDoNullCheck() || instruction->IsClassFinal()) {
+ if (zero.IsLinked()) {
__ Bind(&zero);
- __ movl(out, Immediate(0));
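+ // `xorl` materializes zero with a shorter encoding than `movl`.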
+ __ xorl(out, out);
+ }
+
+ if (done.IsLinked()) {
+ __ Bind(&done);
}
if (slow_path != nullptr) {
__ Bind(slow_path->GetExitLabel());
}
- __ Bind(&done);
}
void LocationsBuilderX86::VisitCheckCast(HCheckCast* instruction) {
+ LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
+ bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
+
+ switch (instruction->GetTypeCheckKind()) {
+ case TypeCheckKind::kExactCheck:
+ case TypeCheckKind::kAbstractClassCheck:
+ case TypeCheckKind::kClassHierarchyCheck:
+ case TypeCheckKind::kArrayObjectCheck:
+ call_kind = throws_into_catch
+ ? LocationSummary::kCallOnSlowPath
+ : LocationSummary::kNoCall;
+ break;
+ case TypeCheckKind::kInterfaceCheck:
+ call_kind = LocationSummary::kCall;
+ break;
+ case TypeCheckKind::kArrayCheck:
+ call_kind = LocationSummary::kCallOnSlowPath;
+ break;
+ }
+
LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
- instruction, LocationSummary::kCallOnSlowPath);
- locations->SetInAt(0, Location::RequiresRegister());
- locations->SetInAt(1, Location::Any());
- // Note that TypeCheckSlowPathX86 uses this register too.
- locations->AddTemp(Location::RequiresRegister());
+ instruction, call_kind);
+ if (call_kind != LocationSummary::kCall) {
+ locations->SetInAt(0, Location::RequiresRegister());
+ locations->SetInAt(1, Location::Any());
+ // Note that TypeCheckSlowPathX86 uses this register too.
+ locations->AddTemp(Location::RequiresRegister());
+ } else {
+ InvokeRuntimeCallingConvention calling_convention;
+ locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
+ locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
+ }
}
void InstructionCodeGeneratorX86::VisitCheckCast(HCheckCast* instruction) {
LocationSummary* locations = instruction->GetLocations();
Register obj = locations->InAt(0).AsRegister<Register>();
Location cls = locations->InAt(1);
- Register temp = locations->GetTemp(0).AsRegister<Register>();
- uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
- SlowPathCode* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86(instruction);
- codegen_->AddSlowPath(slow_path);
+ Register temp = locations->WillCall()
+ ? kNoRegister
+ : locations->GetTemp(0).AsRegister<Register>();
+ uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
+ uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
+ uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
+ uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
+ SlowPathCode* slow_path = nullptr;
+
+ if (!locations->WillCall()) {
+ slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86(
+ instruction, !locations->CanCall());
+ codegen_->AddSlowPath(slow_path);
+ }
+
+ NearLabel done;
// Avoid null check if we know obj is not null.
if (instruction->MustDoNullCheck()) {
__ testl(obj, obj);
- __ j(kEqual, slow_path->GetExitLabel());
+ __ j(kEqual, &done);
}
- // Compare the class of `obj` with `cls`.
- __ movl(temp, Address(obj, class_offset));
- __ MaybeUnpoisonHeapReference(temp);
- if (cls.IsRegister()) {
- __ cmpl(temp, cls.AsRegister<Register>());
+
+ if (locations->WillCall()) {
+ __ movl(obj, Address(obj, class_offset));
+ __ MaybeUnpoisonHeapReference(obj);
} else {
- DCHECK(cls.IsStackSlot()) << cls;
- __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
+ __ movl(temp, Address(obj, class_offset));
+ __ MaybeUnpoisonHeapReference(temp);
}
- // The checkcast succeeds if the classes are equal (fast path).
- // Otherwise, we need to go into the slow path to check the types.
- __ j(kNotEqual, slow_path->GetEntryLabel());
- __ Bind(slow_path->GetExitLabel());
+
+ switch (instruction->GetTypeCheckKind()) {
+ case TypeCheckKind::kExactCheck:
+ case TypeCheckKind::kArrayCheck: {
+ if (cls.IsRegister()) {
+ __ cmpl(temp, cls.AsRegister<Register>());
+ } else {
+ DCHECK(cls.IsStackSlot()) << cls;
+ __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
+ }
+ // Jump to slow path for throwing the exception or doing a
+ // more involved array check.
+ __ j(kNotEqual, slow_path->GetEntryLabel());
+ break;
+ }
+ case TypeCheckKind::kAbstractClassCheck: {
+ // If the class is abstract, we eagerly fetch the super class of the
+ // object to avoid doing a comparison we know will fail.
+ NearLabel loop;
+ __ Bind(&loop);
+ __ movl(temp, Address(temp, super_offset));
+ __ MaybeUnpoisonHeapReference(temp);
+ __ testl(temp, temp);
+ // Jump to the slow path to throw the exception.
+ __ j(kEqual, slow_path->GetEntryLabel());
+ if (cls.IsRegister()) {
+ __ cmpl(temp, cls.AsRegister<Register>());
+ } else {
+ DCHECK(cls.IsStackSlot()) << cls;
+ __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
+ }
+ __ j(kNotEqual, &loop);
+ break;
+ }
+ case TypeCheckKind::kClassHierarchyCheck: {
+ // Walk over the class hierarchy to find a match.
+ NearLabel loop, success;
+ __ Bind(&loop);
+ if (cls.IsRegister()) {
+ __ cmpl(temp, cls.AsRegister<Register>());
+ } else {
+ DCHECK(cls.IsStackSlot()) << cls;
+ __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
+ }
+ __ j(kEqual, &success);
+ __ movl(temp, Address(temp, super_offset));
+ __ MaybeUnpoisonHeapReference(temp);
+ __ testl(temp, temp);
+ __ j(kNotEqual, &loop);
+ // Jump to the slow path to throw the exception.
+ __ jmp(slow_path->GetEntryLabel());
+ __ Bind(&success);
+ break;
+ }
+ case TypeCheckKind::kArrayObjectCheck: {
+ // Just need to check that the object's class is a non-primitive array.
+ __ movl(temp, Address(temp, component_offset));
+ __ MaybeUnpoisonHeapReference(temp);
+ __ testl(temp, temp);
+ __ j(kEqual, slow_path->GetEntryLabel());
+ __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
+ __ j(kNotEqual, slow_path->GetEntryLabel());
+ break;
+ }
+ case TypeCheckKind::kInterfaceCheck:
+ default:
+ codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast),
+ instruction,
+ instruction->GetDexPc(),
+ nullptr);
+ break;
+ }
+ __ Bind(&done);
+
+ if (slow_path != nullptr) {
+ __ Bind(slow_path->GetExitLabel());
+ }
}
void LocationsBuilderX86::VisitMonitorOperation(HMonitorOperation* instruction) {
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index 289ef64..32a1db5 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -300,8 +300,8 @@
class TypeCheckSlowPathX86_64 : public SlowPathCode {
public:
- explicit TypeCheckSlowPathX86_64(HInstruction* instruction)
- : instruction_(instruction) {}
+ TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
+ : instruction_(instruction), is_fatal_(is_fatal) {}
void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
LocationSummary* locations = instruction_->GetLocations();
@@ -313,7 +313,19 @@
CodeGeneratorX86_64* x64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
__ Bind(GetEntryLabel());
- SaveLiveRegisters(codegen, locations);
+
+ if (instruction_->IsCheckCast()) {
+ // The codegen for the instruction overwrites `temp`, so put it back in place.
+ CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
+ CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
+ uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
+ __ movl(temp, Address(obj, class_offset));
+ __ MaybeUnpoisonHeapReference(temp);
+ }
+
+ if (!is_fatal_) {
+ SaveLiveRegisters(codegen, locations);
+ }
// We're moving two locations to locations that could overlap, so we need a parallel
// move resolver.
@@ -339,18 +351,23 @@
this);
}
- if (instruction_->IsInstanceOf()) {
- x64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
- }
+ if (!is_fatal_) {
+ if (instruction_->IsInstanceOf()) {
+ x64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
+ }
- RestoreLiveRegisters(codegen, locations);
- __ jmp(GetExitLabel());
+ RestoreLiveRegisters(codegen, locations);
+ __ jmp(GetExitLabel());
+ }
}
const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86_64"; }
+ bool IsFatal() const OVERRIDE { return is_fatal_; }
+
private:
HInstruction* const instruction_;
+ const bool is_fatal_;
DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};
@@ -4684,14 +4701,33 @@
}
void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
- LocationSummary::CallKind call_kind = instruction->IsClassFinal()
- ? LocationSummary::kNoCall
- : LocationSummary::kCallOnSlowPath;
+ LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
+ switch (instruction->GetTypeCheckKind()) {
+ case TypeCheckKind::kExactCheck:
+ case TypeCheckKind::kAbstractClassCheck:
+ case TypeCheckKind::kClassHierarchyCheck:
+ case TypeCheckKind::kArrayObjectCheck:
+ call_kind = LocationSummary::kNoCall;
+ break;
+ case TypeCheckKind::kInterfaceCheck:
+ call_kind = LocationSummary::kCall;
+ break;
+ case TypeCheckKind::kArrayCheck:
+ call_kind = LocationSummary::kCallOnSlowPath;
+ break;
+ }
LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
- locations->SetInAt(0, Location::RequiresRegister());
- locations->SetInAt(1, Location::Any());
- // Note that TypeCheckSlowPathX86_64 uses this register too.
- locations->SetOut(Location::RequiresRegister());
+ if (call_kind != LocationSummary::kCall) {
+ locations->SetInAt(0, Location::RequiresRegister());
+ locations->SetInAt(1, Location::Any());
+ // Note that TypeCheckSlowPathX86_64 uses this register too.
+ locations->SetOut(Location::RequiresRegister());
+ } else {
+ InvokeRuntimeCallingConvention calling_convention;
+ locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
+ locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
+ locations->SetOut(Location::RegisterLocation(RAX));
+ }
}
void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
@@ -4700,8 +4736,11 @@
Location cls = locations->InAt(1);
CpuRegister out = locations->Out().AsRegister<CpuRegister>();
uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
- NearLabel done, zero;
+ uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
+ uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
+ uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
SlowPathCode* slow_path = nullptr;
+ NearLabel done, zero;
// Return 0 if `obj` is null.
// Avoid null check if we know obj is not null.
@@ -4709,77 +4748,282 @@
__ testl(obj, obj);
__ j(kEqual, &zero);
}
- // Compare the class of `obj` with `cls`.
- __ movl(out, Address(obj, class_offset));
- __ MaybeUnpoisonHeapReference(out);
- if (cls.IsRegister()) {
- __ cmpl(out, cls.AsRegister<CpuRegister>());
- } else {
- DCHECK(cls.IsStackSlot()) << cls;
- __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
- }
- if (instruction->IsClassFinal()) {
- // Classes must be equal for the instanceof to succeed.
- __ j(kNotEqual, &zero);
- __ movl(out, Immediate(1));
- __ jmp(&done);
- } else {
- // If the classes are not equal, we go into a slow path.
- DCHECK(locations->OnlyCallsOnSlowPath());
- slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction);
- codegen_->AddSlowPath(slow_path);
- __ j(kNotEqual, slow_path->GetEntryLabel());
- __ movl(out, Immediate(1));
- __ jmp(&done);
+
+ // In case of an interface check, we put the object class into the object register.
+ // This is safe, as the register is caller-save, and the object must be in another
+ // register if it survives the runtime call.
+ CpuRegister target = (instruction->GetTypeCheckKind() == TypeCheckKind::kInterfaceCheck)
+ ? obj
+ : out;
+ __ movl(target, Address(obj, class_offset));
+ __ MaybeUnpoisonHeapReference(target);
+
+ switch (instruction->GetTypeCheckKind()) {
+ case TypeCheckKind::kExactCheck: {
+ if (cls.IsRegister()) {
+ __ cmpl(out, cls.AsRegister<CpuRegister>());
+ } else {
+ DCHECK(cls.IsStackSlot()) << cls;
+ __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
+ }
+ // Classes must be equal for the instanceof to succeed.
+ __ j(kNotEqual, &zero);
+ __ movl(out, Immediate(1));
+ __ jmp(&done);
+ break;
+ }
+ case TypeCheckKind::kAbstractClassCheck: {
+ // If the class is abstract, we eagerly fetch the super class of the
+ // object to avoid doing a comparison we know will fail.
+ NearLabel loop;
+ __ Bind(&loop);
+ __ movl(out, Address(out, super_offset));
+ __ MaybeUnpoisonHeapReference(out);
+ __ testl(out, out);
+ // If `out` is null, we use it for the result, and jump to `done`.
+ __ j(kEqual, &done);
+ if (cls.IsRegister()) {
+ __ cmpl(out, cls.AsRegister<CpuRegister>());
+ } else {
+ DCHECK(cls.IsStackSlot()) << cls;
+ __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
+ }
+ __ j(kNotEqual, &loop);
+ __ movl(out, Immediate(1));
+ if (zero.IsLinked()) {
+ __ jmp(&done);
+ }
+ break;
+ }
+ case TypeCheckKind::kClassHierarchyCheck: {
+ // Walk over the class hierarchy to find a match.
+ NearLabel loop, success;
+ __ Bind(&loop);
+ if (cls.IsRegister()) {
+ __ cmpl(out, cls.AsRegister<CpuRegister>());
+ } else {
+ DCHECK(cls.IsStackSlot()) << cls;
+ __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
+ }
+ __ j(kEqual, &success);
+ __ movl(out, Address(out, super_offset));
+ __ MaybeUnpoisonHeapReference(out);
+ __ testl(out, out);
+ __ j(kNotEqual, &loop);
+ // If `out` is null, we use it for the result, and jump to `done`.
+ __ jmp(&done);
+ __ Bind(&success);
+ __ movl(out, Immediate(1));
+ if (zero.IsLinked()) {
+ __ jmp(&done);
+ }
+ break;
+ }
+ case TypeCheckKind::kArrayObjectCheck: {
+ // Just need to check that the object's class is a non-primitive array.
+ __ movl(out, Address(out, component_offset));
+ __ MaybeUnpoisonHeapReference(out);
+ __ testl(out, out);
+ // If `out` is null, we use it for the result, and jump to `done`.
+ __ j(kEqual, &done);
+ __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
+ __ j(kNotEqual, &zero);
+ __ movl(out, Immediate(1));
+ __ jmp(&done);
+ break;
+ }
+ case TypeCheckKind::kArrayCheck: {
+ if (cls.IsRegister()) {
+ __ cmpl(out, cls.AsRegister<CpuRegister>());
+ } else {
+ DCHECK(cls.IsStackSlot()) << cls;
+ __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
+ }
+ DCHECK(locations->OnlyCallsOnSlowPath());
+ slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(
+ instruction, /* is_fatal */ false);
+ codegen_->AddSlowPath(slow_path);
+ __ j(kNotEqual, slow_path->GetEntryLabel());
+ __ movl(out, Immediate(1));
+ if (zero.IsLinked()) {
+ __ jmp(&done);
+ }
+ break;
+ }
+
+ case TypeCheckKind::kInterfaceCheck:
+ default: {
+ codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial),
+ instruction,
+ instruction->GetDexPc(),
+ nullptr);
+ if (zero.IsLinked()) {
+ __ jmp(&done);
+ }
+ break;
+ }
}
- if (instruction->MustDoNullCheck() || instruction->IsClassFinal()) {
+ if (zero.IsLinked()) {
__ Bind(&zero);
- __ movl(out, Immediate(0));
+ __ xorl(out, out);
+ }
+
+ if (done.IsLinked()) {
+ __ Bind(&done);
}
if (slow_path != nullptr) {
__ Bind(slow_path->GetExitLabel());
}
- __ Bind(&done);
}
void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
+ LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
+ bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
+
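+ // Decide whether the check is done inline or through a runtime call, and
+ // whether its slow path (if any) must save live registers.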
+ switch (instruction->GetTypeCheckKind()) {
+ case TypeCheckKind::kExactCheck:
+ case TypeCheckKind::kAbstractClassCheck:
+ case TypeCheckKind::kClassHierarchyCheck:
+ case TypeCheckKind::kArrayObjectCheck:
+ call_kind = throws_into_catch
+ ? LocationSummary::kCallOnSlowPath
+ : LocationSummary::kNoCall;
+ break;
+ case TypeCheckKind::kInterfaceCheck:
+ call_kind = LocationSummary::kCall;
+ break;
+ case TypeCheckKind::kArrayCheck:
+ call_kind = LocationSummary::kCallOnSlowPath;
+ break;
+ }
+
LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
- instruction, LocationSummary::kCallOnSlowPath);
- locations->SetInAt(0, Location::RequiresRegister());
- locations->SetInAt(1, Location::Any());
- // Note that TypeCheckSlowPathX86_64 uses this register too.
- locations->AddTemp(Location::RequiresRegister());
+ instruction, call_kind);
+ if (call_kind != LocationSummary::kCall) {
+ locations->SetInAt(0, Location::RequiresRegister());
+ locations->SetInAt(1, Location::Any());
+ // Note that TypeCheckSlowPathX86_64 uses this register too.
+ locations->AddTemp(Location::RequiresRegister());
+ } else {
+ InvokeRuntimeCallingConvention calling_convention;
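+ // The runtime entry point expects the destination class as its first argument,
+ // hence the swapped inputs.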
+ locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
+ locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
+ }
}
void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
LocationSummary* locations = instruction->GetLocations();
CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
Location cls = locations->InAt(1);
- CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
- uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
- SlowPathCode* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction);
- codegen_->AddSlowPath(slow_path);
+ CpuRegister temp = locations->WillCall()
+ ? CpuRegister(kNoRegister)
+ : locations->GetTemp(0).AsRegister<CpuRegister>();
+ uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
+ uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
+ uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
+ uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
+ SlowPathCode* slow_path = nullptr;
+
+ if (!locations->WillCall()) {
+ slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(
+ instruction, !locations->CanCall());
+ codegen_->AddSlowPath(slow_path);
+ }
+
+ NearLabel done;
// Avoid null check if we know obj is not null.
if (instruction->MustDoNullCheck()) {
__ testl(obj, obj);
- __ j(kEqual, slow_path->GetExitLabel());
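+ // A null reference always passes the check-cast: jump straight to done.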
+ __ j(kEqual, &done);
}
- // Compare the class of `obj` with `cls`.
- __ movl(temp, Address(obj, class_offset));
- __ MaybeUnpoisonHeapReference(temp);
- if (cls.IsRegister()) {
- __ cmpl(temp, cls.AsRegister<CpuRegister>());
+
+ if (locations->WillCall()) {
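+ // No temp was allocated for the runtime call; clobber `obj` with its class instead.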
+ __ movl(obj, Address(obj, class_offset));
+ __ MaybeUnpoisonHeapReference(obj);
} else {
- DCHECK(cls.IsStackSlot()) << cls;
- __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
+ __ movl(temp, Address(obj, class_offset));
+ __ MaybeUnpoisonHeapReference(temp);
}
- // The checkcast succeeds if the classes are equal (fast path).
- // Otherwise, we need to go into the slow path to check the types.
- __ j(kNotEqual, slow_path->GetEntryLabel());
- __ Bind(slow_path->GetExitLabel());
+
+ switch (instruction->GetTypeCheckKind()) {
+ case TypeCheckKind::kExactCheck:
+ case TypeCheckKind::kArrayCheck: {
+ if (cls.IsRegister()) {
+ __ cmpl(temp, cls.AsRegister<CpuRegister>());
+ } else {
+ DCHECK(cls.IsStackSlot()) << cls;
+ __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
+ }
+ // Jump to slow path for throwing the exception or doing a
+ // more involved array check.
+ __ j(kNotEqual, slow_path->GetEntryLabel());
+ break;
+ }
+ case TypeCheckKind::kAbstractClassCheck: {
+ // If the class is abstract, we eagerly fetch the super class of the
+ // object to avoid doing a comparison we know will fail.
+ NearLabel loop;
+ __ Bind(&loop);
+ __ movl(temp, Address(temp, super_offset));
+ __ MaybeUnpoisonHeapReference(temp);
+ __ testl(temp, temp);
+ // Jump to the slow path to throw the exception.
+ __ j(kEqual, slow_path->GetEntryLabel());
+ if (cls.IsRegister()) {
+ __ cmpl(temp, cls.AsRegister<CpuRegister>());
+ } else {
+ DCHECK(cls.IsStackSlot()) << cls;
+ __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
+ }
+ __ j(kNotEqual, &loop);
+ break;
+ }
+ case TypeCheckKind::kClassHierarchyCheck: {
+ // Walk over the class hierarchy to find a match.
+ NearLabel loop, success;
+ __ Bind(&loop);
+ if (cls.IsRegister()) {
+ __ cmpl(temp, cls.AsRegister<CpuRegister>());
+ } else {
+ DCHECK(cls.IsStackSlot()) << cls;
+ __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
+ }
+ __ j(kEqual, &success);
+ __ movl(temp, Address(temp, super_offset));
+ __ MaybeUnpoisonHeapReference(temp);
+ __ testl(temp, temp);
+ __ j(kNotEqual, &loop);
+ // Jump to the slow path to throw the exception.
+ __ jmp(slow_path->GetEntryLabel());
+ __ Bind(&success);
+ break;
+ }
+ case TypeCheckKind::kArrayObjectCheck: {
+ // Just need to check that the object's class is a non-primitive array.
+ __ movl(temp, Address(temp, component_offset));
+ __ MaybeUnpoisonHeapReference(temp);
+ __ testl(temp, temp);
+ __ j(kEqual, slow_path->GetEntryLabel());
+ __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
+ __ j(kNotEqual, slow_path->GetEntryLabel());
+ break;
+ }
+ case TypeCheckKind::kInterfaceCheck:
+ default:
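+ // No fast path when checking against an interface: the runtime performs the
+ // check and throws on failure.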
+ codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast),
+ instruction,
+ instruction->GetDexPc(),
+ nullptr);
+ break;
+ }
+ __ Bind(&done);
+
+ if (slow_path != nullptr) {
+ __ Bind(slow_path->GetExitLabel());
+ }
}
void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 3c57180..8dd31be 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -4719,16 +4719,29 @@
DISALLOW_COPY_AND_ASSIGN(HThrow);
};
+/**
+ * Implementation strategies for the code generator of an `HInstanceOf`
+ * or an `HCheckCast`.
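+ * For example, `o instanceof String` is an exact check (String is final),
+ * `o instanceof Runnable` is an interface check, and `o instanceof Object[]`
+ * is an array object check.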
+ */
+enum class TypeCheckKind {
+ kExactCheck, // Can do a single class compare.
+ kClassHierarchyCheck, // Can just walk the super class chain.
+ kAbstractClassCheck, // Can just walk the super class chain, starting one level up.
+ kInterfaceCheck, // No optimization yet when checking against an interface.
+ kArrayObjectCheck, // Can just check that the component type is not primitive.
+ kArrayCheck // No optimization yet when checking against a generic array.
+};
+
class HInstanceOf : public HExpression<2> {
public:
HInstanceOf(HInstruction* object,
HLoadClass* constant,
- bool class_is_final,
+ TypeCheckKind check_kind,
uint32_t dex_pc)
: HExpression(Primitive::kPrimBoolean,
- SideEffectsForArchRuntimeCalls(class_is_final),
+ SideEffectsForArchRuntimeCalls(check_kind),
dex_pc),
- class_is_final_(class_is_final),
+ check_kind_(check_kind),
must_do_null_check_(true) {
SetRawInputAt(0, object);
SetRawInputAt(1, constant);
@@ -4744,20 +4757,25 @@
return false;
}
- bool IsClassFinal() const { return class_is_final_; }
+ bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; }
+
+ TypeCheckKind GetTypeCheckKind() const { return check_kind_; }
// Used only in code generation.
bool MustDoNullCheck() const { return must_do_null_check_; }
void ClearMustDoNullCheck() { must_do_null_check_ = false; }
- static SideEffects SideEffectsForArchRuntimeCalls(bool class_is_final) {
- return class_is_final ? SideEffects::None() : SideEffects::CanTriggerGC();
+ static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
+ return (check_kind == TypeCheckKind::kExactCheck)
+ ? SideEffects::None()
+ // MIPS currently does runtime calls for any other checks.
+ : SideEffects::CanTriggerGC();
}
DECLARE_INSTRUCTION(InstanceOf);
private:
- const bool class_is_final_;
+ const TypeCheckKind check_kind_;
bool must_do_null_check_;
DISALLOW_COPY_AND_ASSIGN(HInstanceOf);
@@ -4813,10 +4831,10 @@
public:
HCheckCast(HInstruction* object,
HLoadClass* constant,
- bool class_is_final,
+ TypeCheckKind check_kind,
uint32_t dex_pc)
: HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc),
- class_is_final_(class_is_final),
+ check_kind_(check_kind),
must_do_null_check_(true) {
SetRawInputAt(0, object);
SetRawInputAt(1, constant);
@@ -4837,14 +4855,14 @@
bool MustDoNullCheck() const { return must_do_null_check_; }
void ClearMustDoNullCheck() { must_do_null_check_ = false; }
+ TypeCheckKind GetTypeCheckKind() const { return check_kind_; }
-
- bool IsClassFinal() const { return class_is_final_; }
+ bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; }
DECLARE_INSTRUCTION(CheckCast);
private:
- const bool class_is_final_;
+ const TypeCheckKind check_kind_;
bool must_do_null_check_;
DISALLOW_COPY_AND_ASSIGN(HCheckCast);
diff --git a/runtime/mirror/class.h b/runtime/mirror/class.h
index 9422432..2668b3d 100644
--- a/runtime/mirror/class.h
+++ b/runtime/mirror/class.h
@@ -348,6 +348,10 @@
return (access_flags & kAccClassIsProxy) != 0;
}
+ static MemberOffset PrimitiveTypeOffset() {
+ return OFFSET_OF_OBJECT_MEMBER(Class, primitive_type_);
+ }
+
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
Primitive::Type GetPrimitiveType() ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_);
diff --git a/test/530-instanceof-checkcast/expected.txt b/test/530-instanceof-checkcast/expected.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test/530-instanceof-checkcast/expected.txt
diff --git a/test/530-instanceof-checkcast/info.txt b/test/530-instanceof-checkcast/info.txt
new file mode 100644
index 0000000..51f68cc
--- /dev/null
+++ b/test/530-instanceof-checkcast/info.txt
@@ -0,0 +1 @@
+Tests compiler optimizations on instanceof and checkcast.
diff --git a/test/530-instanceof-checkcast/src/Main.java b/test/530-instanceof-checkcast/src/Main.java
new file mode 100644
index 0000000..5f068f1
--- /dev/null
+++ b/test/530-instanceof-checkcast/src/Main.java
@@ -0,0 +1,248 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+public class Main {
+
+ static Object exactCheck = new ExactCheck();
+ static Object abstractCheckImpl = new AbstractCheckImpl();
+ static Object interfaceCheckImpl = new InterfaceCheckImpl();
+ static Object normalCheck = new NormalCheck();
+ static Object regularObject = new Object();
+ static Object objectArray = new Object[2];
+ static Object intArray = new int[2];
+ static Object doubleArray = new double[2];
+ static Object exactArray = new ExactCheck[2];
+ static Object normalArray = new NormalCheck[2];
+
+ static Object field;
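+ // Target for the results of the check-cast expressions below.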
+
+ public static void main(String[] args) {
+ checkInstanceOfNonTryCatch();
+ // We also run the checks in a method with a try/catch, because the compiler
+ // then makes slow paths unconditionally save their live registers.
+ checkInstanceOfTryCatch();
+
+ checkCheckCast();
+ }
+
+ public static void checkInstanceOfNonTryCatch() {
+ check(true, exactCheck instanceof ExactCheck);
+ check(false, regularObject instanceof ExactCheck);
+
+ check(true, abstractCheckImpl instanceof AbstractCheck);
+ check(false, regularObject instanceof AbstractCheck);
+
+ check(true, interfaceCheckImpl instanceof InterfaceCheck);
+ check(false, regularObject instanceof InterfaceCheck);
+
+ check(true, normalCheck instanceof NormalCheck);
+ check(true, exactCheck instanceof NormalCheck);
+ check(false, regularObject instanceof NormalCheck);
+
+ check(false, regularObject instanceof int[]);
+ check(false, objectArray instanceof int[]);
+ check(true, intArray instanceof int[]);
+ check(false, doubleArray instanceof int[]);
+
+ check(false, regularObject instanceof ExactCheck[]);
+ check(false, objectArray instanceof ExactCheck[]);
+ check(false, doubleArray instanceof ExactCheck[]);
+ check(true, exactArray instanceof ExactCheck[]);
+ check(false, normalArray instanceof ExactCheck[]);
+
+ check(false, regularObject instanceof NormalCheck[]);
+ check(false, objectArray instanceof NormalCheck[]);
+ check(false, doubleArray instanceof NormalCheck[]);
+ check(true, exactArray instanceof NormalCheck[]);
+ check(true, normalArray instanceof NormalCheck[]);
+
+ check(false, regularObject instanceof Object[]);
+ check(true, objectArray instanceof Object[]);
+ check(false, doubleArray instanceof Object[]);
+ check(true, exactArray instanceof Object[]);
+ check(true, normalArray instanceof Object[]);
+ }
+
+ public static void checkInstanceOfTryCatch() {
+ try {
+ check(true, exactCheck instanceof ExactCheck);
+ check(false, regularObject instanceof ExactCheck);
+
+ check(true, abstractCheckImpl instanceof AbstractCheck);
+ check(false, regularObject instanceof AbstractCheck);
+
+ check(true, interfaceCheckImpl instanceof InterfaceCheck);
+ check(false, regularObject instanceof InterfaceCheck);
+
+ check(true, normalCheck instanceof NormalCheck);
+ check(true, exactCheck instanceof NormalCheck);
+ check(false, regularObject instanceof NormalCheck);
+
+ check(false, regularObject instanceof int[]);
+ check(false, objectArray instanceof int[]);
+ check(true, intArray instanceof int[]);
+ check(false, doubleArray instanceof int[]);
+
+ check(false, regularObject instanceof ExactCheck[]);
+ check(false, objectArray instanceof ExactCheck[]);
+ check(false, doubleArray instanceof ExactCheck[]);
+ check(true, exactArray instanceof ExactCheck[]);
+ check(false, normalArray instanceof ExactCheck[]);
+
+ check(false, regularObject instanceof NormalCheck[]);
+ check(false, objectArray instanceof NormalCheck[]);
+ check(false, doubleArray instanceof NormalCheck[]);
+ check(true, exactArray instanceof NormalCheck[]);
+ check(true, normalArray instanceof NormalCheck[]);
+
+ check(false, regularObject instanceof Object[]);
+ check(true, objectArray instanceof Object[]);
+ check(false, doubleArray instanceof Object[]);
+ check(true, exactArray instanceof Object[]);
+ check(true, normalArray instanceof Object[]);
+ } catch (Throwable t) {
+ throw new Error("Unreachable");
+ }
+ }
+
+ public static void check(boolean expected, boolean actual) {
+ if (actual != expected) {
+ throw new Error("Expected " + expected + ", got " + actual);
+ }
+ }
+
+ public static void checkCheckCast() {
+ // Exact check.
+ field = (ExactCheck)exactCheck;
+ try {
+ field = (ExactCheck)regularObject;
+ throw new Error("Can't reach here");
+ } catch (ClassCastException ignore) {}
+
+ // Abstract check.
+ field = (AbstractCheck)abstractCheckImpl;
+ try {
+ field = (AbstractCheck)regularObject;
+ throw new Error("Can't reach here");
+ } catch (ClassCastException ignore) {}
+
+ // Interface check.
+ field = (InterfaceCheck)interfaceCheckImpl;
+ try {
+ field = (InterfaceCheck)regularObject;
+ throw new Error("Can't reach here");
+ } catch (ClassCastException ignore) {}
+
+ // Normal check.
+ field = (NormalCheck)normalCheck;
+ field = (NormalCheck)exactCheck;
+ try {
+ field = (NormalCheck)regularObject;
+ throw new Error("Can't reach here");
+ } catch (ClassCastException ignore) {}
+
+ // Primitive array check.
+ try {
+ field = (int[])regularObject;
+ throw new Error("Can't reach here");
+ } catch (ClassCastException ignore) {}
+
+ try {
+ field = (int[])objectArray;
+ throw new Error("Can't reach here");
+ } catch (ClassCastException ignore) {}
+
+ field = (int[])intArray;
+ try {
+ field = (int[])doubleArray;
+ throw new Error("Can't reach here");
+ } catch (ClassCastException ignore) {}
+
+ // Array with final component type check.
+ try {
+ field = (ExactCheck[])regularObject;
+ throw new Error("Can't reach here");
+ } catch (ClassCastException ignore) {}
+
+ try {
+ field = (ExactCheck[])objectArray;
+ throw new Error("Can't reach here");
+ } catch (ClassCastException ignore) {}
+
+ try {
+ field = (ExactCheck[])doubleArray;
+ throw new Error("Can't reach here");
+ } catch (ClassCastException ignore) {}
+
+ field = (ExactCheck[])exactArray;
+ try {
+ field = (ExactCheck[])normalArray;
+ throw new Error("Can't reach here");
+ } catch (ClassCastException ignore) {}
+
+ // Array with non final component type check.
+ try {
+ field = (NormalCheck[])regularObject;
+ throw new Error("Can't reach here");
+ } catch (ClassCastException ignore) {}
+
+ try {
+ field = (NormalCheck[])objectArray;
+ throw new Error("Can't reach here");
+ } catch (ClassCastException ignore) {}
+
+ try {
+ field = (NormalCheck[])doubleArray;
+ throw new Error("Can't reach here");
+ } catch (ClassCastException ignore) {}
+
+ field = (NormalCheck[])exactArray;
+ field = (NormalCheck[])normalArray;
+
+ // Object[] check.
+ try {
+ field = (Object[])regularObject;
+ throw new Error("Can't reach here");
+ } catch (ClassCastException ignore) {}
+
+ field = (Object[])objectArray;
+ try {
+ field = (Object[])doubleArray;
+ throw new Error("Can't reach here");
+ } catch (ClassCastException ignore) {}
+
+ field = (Object[])exactArray;
+ field = (Object[])normalArray;
+ }
+}
+
+class NormalCheck {
+}
+
+final class ExactCheck extends NormalCheck {
+}
+
+abstract class AbstractCheck {
+}
+
+class AbstractCheckImpl extends AbstractCheck {
+}
+
+interface InterfaceCheck {
+}
+
+class InterfaceCheckImpl implements InterfaceCheck {
+}