Implement Reference.getReferent() intrinsic.
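The location setup is shared between architectures:
IntrinsicVisitor::CreateReferenceGetReferentLocations() creates a
kCallOnSlowPath LocationSummary (or none at all when boot image
objects cannot be referenced), and the offsets of the Reference
"disableIntrinsic" and "slowPathEnabled" static fields are exposed
to the backends. A minimal sketch of the expected backend hook; the
class name IntrinsicLocationsBuilderARM64 and its codegen_ member
are assumptions, not shown in this change:

  void IntrinsicLocationsBuilderARM64::VisitReferenceGetReferent(HInvoke* invoke) {
    // Delegate to the shared helper; it bails out (creating no
    // LocationSummary) when boot image objects cannot be referenced.
    IntrinsicVisitor::CreateReferenceGetReferentLocations(invoke, codegen_);
  }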
Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing --jit
Test: aosp_blueline-userdebug boots.
Test: run-gtests.sh
Test: testrunner.py --target --optimizing --jit
Bug: 170286013
Change-Id: I4762f7c1cf3d61de2215ec8f1d14be80289c2372
diff --git a/compiler/optimizing/intrinsics.cc b/compiler/optimizing/intrinsics.cc
index 1663ee9..10d0b89 100644
--- a/compiler/optimizing/intrinsics.cc
+++ b/compiler/optimizing/intrinsics.cc
@@ -146,6 +146,22 @@
   return true;
 }
 
+static bool CanReferenceBootImageObjects(HInvoke* invoke, const CompilerOptions& compiler_options) {
+  // Piggyback on the method load kind to determine whether we can use PC-relative addressing
+  // for AOT. This should cover both the testing config (non-PIC boot image) and codegens that
+  // reject PC-relative load kinds and fall back to the runtime call.
+  if (compiler_options.IsAotCompiler() &&
+      !invoke->AsInvokeStaticOrDirect()->HasPcRelativeMethodLoadKind()) {
+    return false;
+  }
+  if (!compiler_options.IsBootImage() &&
+      Runtime::Current()->GetHeap()->GetBootImageSpaces().empty()) {
+    DCHECK(compiler_options.IsJitCompiler());
+    return false;  // Running without boot image, cannot use required boot image objects.
+  }
+  return true;
+}
+
void IntrinsicVisitor::ComputeIntegerValueOfLocations(HInvoke* invoke,
CodeGenerator* codegen,
Location return_location,
@@ -153,11 +169,7 @@
   // The intrinsic will call if it needs to allocate a j.l.Integer.
   LocationSummary::CallKind call_kind = LocationSummary::kCallOnMainOnly;
   const CompilerOptions& compiler_options = codegen->GetCompilerOptions();
-  // Piggyback on the method load kind to determine whether we can use PC-relative addressing
-  // for AOT. This should cover both the testing config (non-PIC boot image) and codegens that
-  // reject PC-relative load kinds and fall back to the runtime call.
-  if (compiler_options.IsAotCompiler() &&
-      !invoke->AsInvokeStaticOrDirect()->HasPcRelativeMethodLoadKind()) {
+  if (!CanReferenceBootImageObjects(invoke, compiler_options)) {
     return;
   }
   if (compiler_options.IsBootImage()) {
@@ -206,9 +218,6 @@
     }
   } else {
     Runtime* runtime = Runtime::Current();
-    if (runtime->GetHeap()->GetBootImageSpaces().empty()) {
-      return;  // Running without boot image, cannot use required boot image objects.
-    }
     Thread* self = Thread::Current();
     ScopedObjectAccess soa(self);
     ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects = GetBootImageLiveObjects();
@@ -244,7 +253,7 @@
     }
   }
 
-  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
+  ArenaAllocator* allocator = codegen->GetGraph()->GetAllocator();
   LocationSummary* locations = new (allocator) LocationSummary(invoke, call_kind, kIntrinsified);
   if (call_kind == LocationSummary::kCallOnMainOnly) {
     locations->SetInAt(0, Location::RegisterOrConstant(invoke->InputAt(0)));
@@ -354,10 +363,39 @@
   return info;
 }
 
+MemberOffset IntrinsicVisitor::GetReferenceDisableIntrinsicOffset() {
+  ScopedObjectAccess soa(Thread::Current());
+  // "disableIntrinsic" is the first static field.
+  ArtField* field = GetClassRoot<mirror::Reference>()->GetStaticField(0);
+  DCHECK_STREQ(field->GetName(), "disableIntrinsic");
+  return field->GetOffset();
+}
+
+MemberOffset IntrinsicVisitor::GetReferenceSlowPathEnabledOffset() {
+  ScopedObjectAccess soa(Thread::Current());
+  // "slowPathEnabled" is the second static field.
+  ArtField* field = GetClassRoot<mirror::Reference>()->GetStaticField(1);
+  DCHECK_STREQ(field->GetName(), "slowPathEnabled");
+  return field->GetOffset();
+}
+
+void IntrinsicVisitor::CreateReferenceGetReferentLocations(HInvoke* invoke,
+                                                           CodeGenerator* codegen) {
+  if (!CanReferenceBootImageObjects(invoke, codegen->GetCompilerOptions())) {
+    return;
+  }
+
+  ArenaAllocator* allocator = codegen->GetGraph()->GetAllocator();
+  LocationSummary* locations =
+      new (allocator) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
+  locations->SetInAt(0, Location::RequiresRegister());
+  locations->SetOut(Location::RequiresRegister());
+}
+
 void IntrinsicVisitor::AssertNonMovableStringClass() {
   if (kIsDebugBuild) {
     ScopedObjectAccess soa(Thread::Current());
-    ObjPtr<mirror::Class> string_class = GetClassRoot<art::mirror::String>();
+    ObjPtr<mirror::Class> string_class = GetClassRoot<mirror::String>();
     CHECK(!art::Runtime::Current()->GetHeap()->IsMovableObject(string_class));
   }
 }
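
On the code generation side, the two MemberOffset getters above are
meant to let each backend test the "disableIntrinsic" and
"slowPathEnabled" static fields before taking the fast path. A hedged
sketch of that shape; the class name and the Emit* helpers are
hypothetical placeholders, only the IntrinsicVisitor getters and the
kCallOnSlowPath locations come from the change above:

  void IntrinsicCodeGeneratorARM64::VisitReferenceGetReferent(HInvoke* invoke) {
    LocationSummary* locations = invoke->GetLocations();
    // Hypothetical slow path that falls back to the runtime implementation.
    SlowPathCode* slow_path = EmitGetReferentRuntimeCallSlowPath(invoke);
    // Go to the slow path if either static field of java.lang.ref.Reference is set.
    EmitJumpIfStaticFieldSet(IntrinsicVisitor::GetReferenceDisableIntrinsicOffset(), slow_path);
    EmitJumpIfStaticFieldSet(IntrinsicVisitor::GetReferenceSlowPathEnabledOffset(), slow_path);
    // Fast path: load the referent field with a read-barrier-aware load.
    EmitLoadReferent(locations->InAt(0), locations->Out());
    EmitBindSlowPathExit(slow_path);
  }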