Direct calls to @CriticalNative methods.
Emit direct calls from compiled managed code to the native
code registered with the method, avoiding the JNI stub.
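For reference, a @CriticalNative implementation receives neither a JNIEnv*
nor a jclass, which is what allows compiled managed code to branch to the
registered native function directly. A minimal illustrative sketch (the
class, method and array names below are made up for this example and are
not part of the change):

  // Java:
  //   @CriticalNative
  //   static native int add(int a, int b);
  //
  // Native side (C++), registered through JNIEnv::RegisterNatives();
  // note the absence of JNIEnv* and jclass parameters:
  static jint add(jint a, jint b) {
    return a + b;
  }
  static const JNINativeMethod kAddMethods[] = {
      {"add", "(II)I", reinterpret_cast<void*>(add)},
  };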
Golem results:
art-opt-cc                       x86     x86-64  arm     arm64
  NativeDowncallStaticCritical   +12.5%  +62.5%  +75.9%  +41.7%
  NativeDowncallStaticCritical6  +55.6%  +87.5%  +72.1%  +35.3%
art-opt                          x86     x86-64  arm     arm64
  NativeDowncallStaticCritical   +28.6%  +85.6%  +76.4%  +38.4%
  NativeDowncallStaticCritical6  +44.6%  +44.6%  +74.6%  +32.2%
Test: Covered by 178-app-image-native-method.
Test: m test-art-host-gtest
Test: testrunner.py --host --debuggable --ndebuggable \
--optimizing --jit --jit-on-first-use
Test: run-gtests.sh
Test: testrunner.py --target --optimizing
Test: testrunner.py --target --debuggable --ndebuggable \
--optimizing --jit --jit-on-first-use -t 178
Test: aosp_cf_x86_phone-userdebug boots.
Test: aosp_cf_x86_phone-userdebug/jitzygote boots.
Bug: 112189621
Change-Id: I8b37da51e8fe0b7bc513bb81b127fe0416068866
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index f24c5f4..f7fe27d 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -322,7 +322,7 @@
         vm->DeleteWeakGlobalRef(self, classes_[i]);
         if (klass != nullptr) {
           mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
-          class_linker_->FixupStaticTrampolines(klass.Get());
+          class_linker_->FixupStaticTrampolines(self, klass.Get());
         }
       }
       num_classes_ = 0u;
@@ -422,14 +422,14 @@
     // Thanks to the x86 memory model, we do not need any memory fences and
     // we can immediately mark the class as visibly initialized.
     mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
-    FixupStaticTrampolines(klass.Get());
+    FixupStaticTrampolines(self, klass.Get());
     return nullptr;
   }
   if (Runtime::Current()->IsActiveTransaction()) {
     // Transactions are single-threaded, so we can mark the class as visibly intialized.
     // (Otherwise we'd need to track the callback's entry in the transaction for rollback.)
     mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
-    FixupStaticTrampolines(klass.Get());
+    FixupStaticTrampolines(self, klass.Get());
     return nullptr;
   }
   mirror::Class::SetStatus(klass, ClassStatus::kInitialized, self);
@@ -449,6 +449,65 @@
   }
 }
+const void* ClassLinker::RegisterNative(
+    Thread* self, ArtMethod* method, const void* native_method) {
+  CHECK(method->IsNative()) << method->PrettyMethod();
+  CHECK(native_method != nullptr) << method->PrettyMethod();
+  void* new_native_method = nullptr;
+  Runtime* runtime = Runtime::Current();
+  runtime->GetRuntimeCallbacks()->RegisterNativeMethod(method,
+                                                       native_method,
+                                                       /*out*/&new_native_method);
+  if (method->IsCriticalNative()) {
+    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
+    // Remove old registered method if any.
+    auto it = critical_native_code_with_clinit_check_.find(method);
+    if (it != critical_native_code_with_clinit_check_.end()) {
+      critical_native_code_with_clinit_check_.erase(it);
+    }
+    // To ensure correct memory visibility, we need the class to be visibly
+    // initialized before we can set the JNI entrypoint.
+    if (method->GetDeclaringClass()->IsVisiblyInitialized()) {
+      method->SetEntryPointFromJni(new_native_method);
+    } else {
+      critical_native_code_with_clinit_check_.emplace(method, new_native_method);
+    }
+  } else {
+    method->SetEntryPointFromJni(new_native_method);
+  }
+  return new_native_method;
+}
+
+void ClassLinker::UnregisterNative(Thread* self, ArtMethod* method) {
+  CHECK(method->IsNative()) << method->PrettyMethod();
+  // Restore stub to lookup native pointer via dlsym.
+  if (method->IsCriticalNative()) {
+    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
+    auto it = critical_native_code_with_clinit_check_.find(method);
+    if (it != critical_native_code_with_clinit_check_.end()) {
+      critical_native_code_with_clinit_check_.erase(it);
+    }
+    method->SetEntryPointFromJni(GetJniDlsymLookupCriticalStub());
+  } else {
+    method->SetEntryPointFromJni(GetJniDlsymLookupStub());
+  }
+}
+
+const void* ClassLinker::GetRegisteredNative(Thread* self, ArtMethod* method) {
+  if (method->IsCriticalNative()) {
+    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
+    auto it = critical_native_code_with_clinit_check_.find(method);
+    if (it != critical_native_code_with_clinit_check_.end()) {
+      return it->second;
+    }
+    const void* native_code = method->GetEntryPointFromJni();
+    return IsJniDlsymLookupCriticalStub(native_code) ? nullptr : native_code;
+  } else {
+    const void* native_code = method->GetEntryPointFromJni();
+    return IsJniDlsymLookupStub(native_code) ? nullptr : native_code;
+  }
+}
+
 void ClassLinker::ThrowEarlierClassFailure(ObjPtr<mirror::Class> c,
                                            bool wrap_in_no_class_def,
                                            bool log) {
@@ -638,6 +697,8 @@
       image_pointer_size_(kRuntimePointerSize),
       visibly_initialized_callback_lock_("visibly initialized callback lock"),
       visibly_initialized_callback_(nullptr),
+      critical_native_code_with_clinit_check_lock_("critical native code with clinit check lock"),
+      critical_native_code_with_clinit_check_(),
       cha_(Runtime::Current()->IsAotCompiler() ? nullptr : new ClassHierarchyAnalysis()) {
   // For CHA disabled during Aot, see b/34193647.
@@ -2498,6 +2559,17 @@
     CHAOnDeleteUpdateClassVisitor visitor(data.allocator);
     data.class_table->Visit<CHAOnDeleteUpdateClassVisitor, kWithoutReadBarrier>(visitor);
   }
+  {
+    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
+    auto end = critical_native_code_with_clinit_check_.end();
+    for (auto it = critical_native_code_with_clinit_check_.begin(); it != end; ) {
+      if (data.allocator->ContainsUnsafe(it->first)) {
+        it = critical_native_code_with_clinit_check_.erase(it);
+      } else {
+        ++it;
+      }
+    }
+  }
   delete data.allocator;
   delete data.class_table;
@@ -3531,15 +3603,31 @@
   return false;
 }
-void ClassLinker::FixupStaticTrampolines(ObjPtr<mirror::Class> klass) {
+void ClassLinker::FixupStaticTrampolines(Thread* self, ObjPtr<mirror::Class> klass) {
   ScopedAssertNoThreadSuspension sants(__FUNCTION__);
   DCHECK(klass->IsVisiblyInitialized()) << klass->PrettyDescriptor();
-  if (klass->NumDirectMethods() == 0) {
+  size_t num_direct_methods = klass->NumDirectMethods();
+  if (num_direct_methods == 0) {
     return;  // No direct methods => no static methods.
   }
   if (UNLIKELY(klass->IsProxyClass())) {
     return;
   }
+  PointerSize pointer_size = image_pointer_size_;
+  if (std::any_of(klass->GetDirectMethods(pointer_size).begin(),
+                  klass->GetDirectMethods(pointer_size).end(),
+                  [](const ArtMethod& m) { return m.IsCriticalNative(); })) {
+    // Store registered @CriticalNative methods, if any, to JNI entrypoints.
+    // Direct methods are a contiguous chunk of memory, so use the ordering of the map.
+    ArtMethod* first_method = klass->GetDirectMethod(0u, pointer_size);
+    ArtMethod* last_method = klass->GetDirectMethod(num_direct_methods - 1u, pointer_size);
+    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
+    auto lb = critical_native_code_with_clinit_check_.lower_bound(first_method);
+    while (lb != critical_native_code_with_clinit_check_.end() && lb->first <= last_method) {
+      lb->first->SetEntryPointFromJni(lb->second);
+      lb = critical_native_code_with_clinit_check_.erase(lb);
+    }
+  }
   Runtime* runtime = Runtime::Current();
   if (!runtime->IsStarted()) {
     if (runtime->IsAotCompiler() || runtime->GetHeap()->HasBootImageSpace()) {
@@ -3548,18 +3636,13 @@
   }
   const DexFile& dex_file = klass->GetDexFile();
-  const uint16_t class_def_idx = klass->GetDexClassDefIndex();
-  CHECK_NE(class_def_idx, DexFile::kDexNoIndex16);
-  ClassAccessor accessor(dex_file, class_def_idx);
-  // There should always be class data if there were direct methods.
-  CHECK(accessor.HasClassData()) << klass->PrettyDescriptor();
   bool has_oat_class;
   OatFile::OatClass oat_class = OatFile::FindOatClass(dex_file,
                                                       klass->GetDexClassDefIndex(),
                                                       &has_oat_class);
   // Link the code of methods skipped by LinkCode.
-  for (size_t method_index = 0; method_index < accessor.NumDirectMethods(); ++method_index) {
-    ArtMethod* method = klass->GetDirectMethod(method_index, image_pointer_size_);
+  for (size_t method_index = 0; method_index < num_direct_methods; ++method_index) {
+    ArtMethod* method = klass->GetDirectMethod(method_index, pointer_size);
     if (!method->IsStatic()) {
       // Only update static methods.
       continue;
@@ -3664,8 +3747,10 @@
   }
   if (method->IsNative()) {
-    // Unregistering restores the dlsym lookup stub.
-    method->UnregisterNative();
+    // Set up the dlsym lookup stub. Do not go through `UnregisterNative()`
+    // as the extra processing for @CriticalNative is not needed yet.
+    method->SetEntryPointFromJni(
+        method->IsCriticalNative() ? GetJniDlsymLookupCriticalStub() : GetJniDlsymLookupStub());
     if (enter_interpreter || quick_code == nullptr) {
       // We have a native method here without code. Then it should have the generic JNI