Record @{Fast,Critical}Native in method's access flags.
Repurpose the old kAccFastNative flag (which wasn't actually
used for some time) and define a new kAccCriticalNative flag
to record the native method's annotation-based kind. This
avoids repeated determination of the kind from GenericJNI.
And making two transitions to runnable and back (using the
ScopedObjectAccess) from GenericJniMethodEnd() for normal
native methods just to determine that we need to transition
to runnable was really weird.
Since the IsFastNative() function now records the presence
of the @FastNative annotation, synchronized @FastNative
method calls now avoid thread state transitions.
When initializing the Runtime without a boot image, the
WellKnownClasses may not yet be initialized, so relax the
DCheckNativeAnnotation() to take that into account.
Also revert
https://android-review.googlesource.com/509715
as the annotation checks are now much faster.
Bug: 65574695
Bug: 35644369
Test: m test-art-host-gtest
Test: testrunner.py --host
Change-Id: I2fc5ba192b9ce710a0e9202977b4f9543e387efe
diff --git a/compiler/compiler.h b/compiler/compiler.h
index 6c542c8..9179e9c 100644
--- a/compiler/compiler.h
+++ b/compiler/compiler.h
@@ -46,12 +46,6 @@
kOptimizing
};
- enum JniOptimizationFlags {
- kNone = 0x0,
- kFastNative = 0x1,
- kCriticalNative = 0x2,
- };
-
static Compiler* Create(CompilerDriver* driver, Kind kind);
virtual void Init() = 0;
@@ -71,8 +65,7 @@
virtual CompiledMethod* JniCompile(uint32_t access_flags,
uint32_t method_idx,
- const DexFile& dex_file,
- JniOptimizationFlags optimization_flags) const = 0;
+ const DexFile& dex_file) const = 0;
virtual bool JitCompile(Thread* self ATTRIBUTE_UNUSED,
jit::JitCodeCache* code_cache ATTRIBUTE_UNUSED,
diff --git a/compiler/driver/compiler_driver.cc b/compiler/driver/compiler_driver.cc
index a9d27ef..32d0bbe 100644
--- a/compiler/driver/compiler_driver.cc
+++ b/compiler/driver/compiler_driver.cc
@@ -46,6 +46,7 @@
#include "dex/verified_method.h"
#include "dex_compilation_unit.h"
#include "dex_file-inl.h"
+#include "dex_file_annotations.h"
#include "dex_instruction-inl.h"
#include "driver/compiler_options.h"
#include "gc/accounting/card_table-inl.h"
@@ -511,40 +512,11 @@
InstructionSetHasGenericJniStub(driver->GetInstructionSet())) {
// Leaving this empty will trigger the generic JNI version
} else {
- // Look-up the ArtMethod associated with this code_item (if any)
- // -- It is later used to lookup any [optimization] annotations for this method.
- ScopedObjectAccess soa(self);
-
- // TODO: Lookup annotation from DexFile directly without resolving method.
- ArtMethod* method =
- Runtime::Current()->GetClassLinker()->ResolveMethod<ClassLinker::ResolveMode::kNoChecks>(
- dex_file,
- method_idx,
- dex_cache,
- class_loader,
- /* referrer */ nullptr,
- invoke_type);
-
// Query any JNI optimization annotations such as @FastNative or @CriticalNative.
- Compiler::JniOptimizationFlags optimization_flags = Compiler::kNone;
- if (UNLIKELY(method == nullptr)) {
- // Failed method resolutions happen very rarely, e.g. ancestor class cannot be resolved.
- DCHECK(self->IsExceptionPending());
- self->ClearException();
- } else if (method->IsAnnotatedWithFastNative()) {
- // TODO: Will no longer need this CHECK once we have verifier checking this.
- CHECK(!method->IsAnnotatedWithCriticalNative());
- optimization_flags = Compiler::kFastNative;
- } else if (method->IsAnnotatedWithCriticalNative()) {
- // TODO: Will no longer need this CHECK once we have verifier checking this.
- CHECK(!method->IsAnnotatedWithFastNative());
- optimization_flags = Compiler::kCriticalNative;
- }
+ access_flags |= annotations::GetNativeMethodAnnotationAccessFlags(
+ dex_file, dex_file.GetClassDef(class_def_idx), method_idx);
- compiled_method = driver->GetCompiler()->JniCompile(access_flags,
- method_idx,
- dex_file,
- optimization_flags);
+ compiled_method = driver->GetCompiler()->JniCompile(access_flags, method_idx, dex_file);
CHECK(compiled_method != nullptr);
}
} else if ((access_flags & kAccAbstract) != 0) {
diff --git a/compiler/jni/jni_compiler_test.cc b/compiler/jni/jni_compiler_test.cc
index 3460efe..daf64d1 100644
--- a/compiler/jni/jni_compiler_test.cc
+++ b/compiler/jni/jni_compiler_test.cc
@@ -55,10 +55,10 @@
namespace art {
enum class JniKind {
- kNormal = Compiler::kNone, // Regular kind of un-annotated natives.
- kFast = Compiler::kFastNative, // Native method annotated with @FastNative.
- kCritical = Compiler::kCriticalNative, // Native method annotated with @CriticalNative.
- kCount = Compiler::kCriticalNative + 1 // How many different types of JNIs we can have.
+ kNormal, // Regular kind of un-annotated natives.
+ kFast, // Native method annotated with @FastNative.
+ kCritical, // Native method annotated with @CriticalNative.
+ kCount // How many different types of JNIs we can have.
};
// Used to initialize array sizes that want to have different state per current jni.
@@ -2205,8 +2205,8 @@
ArtMethod* method = jni::DecodeArtMethod(jmethod_);
ASSERT_TRUE(method != nullptr);
- EXPECT_FALSE(method->IsAnnotatedWithCriticalNative());
- EXPECT_FALSE(method->IsAnnotatedWithFastNative());
+ EXPECT_FALSE(method->IsCriticalNative());
+ EXPECT_FALSE(method->IsFastNative());
}
// TODO: just rename the java functions to the standard convention and remove duplicated tests
@@ -2227,8 +2227,8 @@
ArtMethod* method = jni::DecodeArtMethod(jmethod_);
ASSERT_TRUE(method != nullptr);
- EXPECT_FALSE(method->IsAnnotatedWithCriticalNative());
- EXPECT_TRUE(method->IsAnnotatedWithFastNative());
+ EXPECT_FALSE(method->IsCriticalNative());
+ EXPECT_TRUE(method->IsFastNative());
}
// TODO: just rename the java functions to the standard convention and remove duplicated tests
@@ -2256,8 +2256,8 @@
ArtMethod* method = jni::DecodeArtMethod(jmethod_);
ASSERT_TRUE(method != nullptr);
- EXPECT_TRUE(method->IsAnnotatedWithCriticalNative());
- EXPECT_FALSE(method->IsAnnotatedWithFastNative());
+ EXPECT_TRUE(method->IsCriticalNative());
+ EXPECT_FALSE(method->IsFastNative());
EXPECT_EQ(0, gJava_myClassNatives_criticalNative_calls[gCurrentJni]);
env_->CallStaticVoidMethod(jklass_, jmethod_);
diff --git a/compiler/jni/quick/jni_compiler.cc b/compiler/jni/quick/jni_compiler.cc
index b3177aa..b93b05c 100644
--- a/compiler/jni/quick/jni_compiler.cc
+++ b/compiler/jni/quick/jni_compiler.cc
@@ -52,8 +52,6 @@
namespace art {
-using JniOptimizationFlags = Compiler::JniOptimizationFlags;
-
template <PointerSize kPointerSize>
static void CopyParameter(JNIMacroAssembler<kPointerSize>* jni_asm,
ManagedRuntimeCallingConvention* mr_conv,
@@ -120,8 +118,7 @@
static CompiledMethod* ArtJniCompileMethodInternal(CompilerDriver* driver,
uint32_t access_flags,
uint32_t method_idx,
- const DexFile& dex_file,
- JniOptimizationFlags optimization_flags) {
+ const DexFile& dex_file) {
const bool is_native = (access_flags & kAccNative) != 0;
CHECK(is_native);
const bool is_static = (access_flags & kAccStatic) != 0;
@@ -131,10 +128,10 @@
const InstructionSetFeatures* instruction_set_features = driver->GetInstructionSetFeatures();
// i.e. if the method was annotated with @FastNative
- const bool is_fast_native = (optimization_flags == Compiler::kFastNative);
+ const bool is_fast_native = (access_flags & kAccFastNative) != 0u;
// i.e. if the method was annotated with @CriticalNative
- bool is_critical_native = (optimization_flags == Compiler::kCriticalNative);
+ bool is_critical_native = (access_flags & kAccCriticalNative) != 0u;
VLOG(jni) << "JniCompile: Method :: "
<< dex_file.PrettyMethod(method_idx, /* with signature */ true)
@@ -781,14 +778,13 @@
CompiledMethod* ArtQuickJniCompileMethod(CompilerDriver* compiler,
uint32_t access_flags,
uint32_t method_idx,
- const DexFile& dex_file,
- Compiler::JniOptimizationFlags optimization_flags) {
+ const DexFile& dex_file) {
if (Is64BitInstructionSet(compiler->GetInstructionSet())) {
return ArtJniCompileMethodInternal<PointerSize::k64>(
- compiler, access_flags, method_idx, dex_file, optimization_flags);
+ compiler, access_flags, method_idx, dex_file);
} else {
return ArtJniCompileMethodInternal<PointerSize::k32>(
- compiler, access_flags, method_idx, dex_file, optimization_flags);
+ compiler, access_flags, method_idx, dex_file);
}
}
diff --git a/compiler/jni/quick/jni_compiler.h b/compiler/jni/quick/jni_compiler.h
index 26c32a3..3fcce55 100644
--- a/compiler/jni/quick/jni_compiler.h
+++ b/compiler/jni/quick/jni_compiler.h
@@ -28,8 +28,7 @@
CompiledMethod* ArtQuickJniCompileMethod(CompilerDriver* compiler,
uint32_t access_flags,
uint32_t method_idx,
- const DexFile& dex_file,
- Compiler::JniOptimizationFlags optimization_flags);
+ const DexFile& dex_file);
} // namespace art
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index 9233eb5..252d538 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -311,13 +311,11 @@
CompiledMethod* JniCompile(uint32_t access_flags,
uint32_t method_idx,
- const DexFile& dex_file,
- JniOptimizationFlags optimization_flags) const OVERRIDE {
+ const DexFile& dex_file) const OVERRIDE {
return ArtQuickJniCompileMethod(GetCompilerDriver(),
access_flags,
method_idx,
- dex_file,
- optimization_flags);
+ dex_file);
}
uintptr_t GetEntryPointOf(ArtMethod* method) const OVERRIDE
diff --git a/openjdkjvmti/ti_redefine.cc b/openjdkjvmti/ti_redefine.cc
index c4f16f5..dcc237d 100644
--- a/openjdkjvmti/ti_redefine.cc
+++ b/openjdkjvmti/ti_redefine.cc
@@ -629,8 +629,8 @@
// Since direct methods have different flags than virtual ones (specifically direct methods must
// have kAccPrivate or kAccStatic or kAccConstructor flags) we can tell if a method changes from
// virtual to direct.
- uint32_t new_flags = new_iter.GetMethodAccessFlags() & ~art::kAccPreviouslyWarm;
- if (new_flags != (old_method->GetAccessFlags() & (art::kAccValidMethodFlags ^ art::kAccPreviouslyWarm))) {
+ uint32_t new_flags = new_iter.GetMethodAccessFlags();
+ if (new_flags != (old_method->GetAccessFlags() & art::kAccValidMethodFlags)) {
RecordFailure(ERR(UNSUPPORTED_REDEFINITION_METHOD_MODIFIERS_CHANGED),
StringPrintf("method '%s' (sig: %s) had different access flags",
new_method_name,
diff --git a/profman/boot_image_profile.cc b/profman/boot_image_profile.cc
index 4092f6e..e5645d3 100644
--- a/profman/boot_image_profile.cc
+++ b/profman/boot_image_profile.cc
@@ -92,7 +92,7 @@
it.SkipInstanceFields();
while (it.HasNextDirectMethod() || it.HasNextVirtualMethod()) {
const uint32_t flags = it.GetMethodAccessFlags();
- if ((flags & kAccNative) != 0 || (flags & kAccFastNative) != 0) {
+ if ((flags & kAccNative) != 0) {
// Native method will get dirtied.
is_clean = false;
break;
diff --git a/runtime/art_method-inl.h b/runtime/art_method-inl.h
index 12b4d16..e1671c9 100644
--- a/runtime/art_method-inl.h
+++ b/runtime/art_method-inl.h
@@ -392,6 +392,7 @@
bool is_synchronized = IsSynchronized();
bool skip_access_checks = SkipAccessChecks();
bool is_fast_native = IsFastNative();
+ bool is_critical_native = IsCriticalNative();
bool is_copied = IsCopied();
bool is_miranda = IsMiranda();
bool is_default = IsDefault();
@@ -404,6 +405,7 @@
DCHECK_EQ(is_synchronized, IsSynchronized());
DCHECK_EQ(skip_access_checks, SkipAccessChecks());
DCHECK_EQ(is_fast_native, IsFastNative());
+ DCHECK_EQ(is_critical_native, IsCriticalNative());
DCHECK_EQ(is_copied, IsCopied());
DCHECK_EQ(is_miranda, IsMiranda());
DCHECK_EQ(is_default, IsDefault());
diff --git a/runtime/art_method.cc b/runtime/art_method.cc
index 8709643..0a108f9 100644
--- a/runtime/art_method.cc
+++ b/runtime/art_method.cc
@@ -26,7 +26,6 @@
#include "class_linker-inl.h"
#include "debugger.h"
#include "dex_file-inl.h"
-#include "dex_file_annotations.h"
#include "dex_instruction.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "gc/accounting/card_table-inl.h"
@@ -392,13 +391,9 @@
self->PopManagedStackFragment(fragment);
}
-const void* ArtMethod::RegisterNative(const void* native_method, bool is_fast) {
+const void* ArtMethod::RegisterNative(const void* native_method) {
CHECK(IsNative()) << PrettyMethod();
- CHECK(!IsFastNative()) << PrettyMethod();
CHECK(native_method != nullptr) << PrettyMethod();
- if (is_fast) {
- AddAccessFlags(kAccFastNative);
- }
void* new_native_method = nullptr;
Runtime::Current()->GetRuntimeCallbacks()->RegisterNativeMethod(this,
native_method,
@@ -408,7 +403,7 @@
}
void ArtMethod::UnregisterNative() {
- CHECK(IsNative() && !IsFastNative()) << PrettyMethod();
+ CHECK(IsNative()) << PrettyMethod();
// restore stub to lookup native pointer via dlsym
SetEntryPointFromJni(GetJniDlsymLookupStub());
}
@@ -428,18 +423,6 @@
cls == WellKnownClasses::ToClass(WellKnownClasses::java_lang_invoke_VarHandle));
}
-bool ArtMethod::IsAnnotatedWithFastNative() {
- ScopedObjectAccess soa(Thread::Current());
- return annotations::HasFastNativeMethodBuildAnnotation(
- *GetDexFile(), GetClassDef(), GetDexMethodIndex());
-}
-
-bool ArtMethod::IsAnnotatedWithCriticalNative() {
- ScopedObjectAccess soa(Thread::Current());
- return annotations::HasCriticalNativeMethodBuildAnnotation(
- *GetDexFile(), GetClassDef(), GetDexMethodIndex());
-}
-
static uint32_t GetOatMethodIndexFromMethodIndex(const DexFile& dex_file,
uint16_t class_def_idx,
uint32_t method_idx) {
diff --git a/runtime/art_method.h b/runtime/art_method.h
index 8927481..0e98d47 100644
--- a/runtime/art_method.h
+++ b/runtime/art_method.h
@@ -200,9 +200,9 @@
}
bool IsMiranda() {
- static_assert((kAccMiranda & (kAccIntrinsic | kAccIntrinsicBits)) == 0,
- "kAccMiranda conflicts with intrinsic modifier");
- return (GetAccessFlags() & kAccMiranda) != 0;
+ // The kAccMiranda flag value is used with a different meaning for native methods,
+ // so we need to check the kAccNative flag as well.
+ return (GetAccessFlags() & (kAccNative | kAccMiranda)) == kAccMiranda;
}
// Returns true if invoking this method will not throw an AbstractMethodError or
@@ -213,6 +213,7 @@
bool IsCompilable() {
if (IsIntrinsic()) {
+ // kAccCompileDontBother overlaps with kAccIntrinsicBits.
return true;
}
return (GetAccessFlags() & kAccCompileDontBother) == 0;
@@ -252,11 +253,24 @@
return (GetAccessFlags<kReadBarrierOption>() & kAccNative) != 0;
}
+ // Checks to see if the method was annotated with @dalvik.annotation.optimization.FastNative.
bool IsFastNative() {
+ // The presence of the annotation is checked by ClassLinker and recorded in access flags.
+ // The kAccFastNative flag value is used with a different meaning for non-native methods,
+ // so we need to check the kAccNative flag as well.
constexpr uint32_t mask = kAccFastNative | kAccNative;
return (GetAccessFlags() & mask) == mask;
}
+ // Checks to see if the method was annotated with @dalvik.annotation.optimization.CriticalNative.
+ bool IsCriticalNative() {
+ // The presence of the annotation is checked by ClassLinker and recorded in access flags.
+ // The kAccCriticalNative flag value is used with a different meaning for non-native methods,
+ // so we need to check the kAccNative flag as well.
+ constexpr uint32_t mask = kAccCriticalNative | kAccNative;
+ return (GetAccessFlags() & mask) == mask;
+ }
+
bool IsAbstract() {
return (GetAccessFlags() & kAccAbstract) != 0;
}
@@ -274,10 +288,14 @@
bool IsPolymorphicSignature() REQUIRES_SHARED(Locks::mutator_lock_);
bool SkipAccessChecks() {
- return (GetAccessFlags() & kAccSkipAccessChecks) != 0;
+ // The kAccSkipAccessChecks flag value is used with a different meaning for native methods,
+ // so we need to check the kAccNative flag as well.
+ return (GetAccessFlags() & (kAccSkipAccessChecks | kAccNative)) == kAccSkipAccessChecks;
}
void SetSkipAccessChecks() {
+ // SkipAccessChecks() is applicable only to non-native methods.
+ DCHECK(!IsNative<kWithoutReadBarrier>());
AddAccessFlags(kAccSkipAccessChecks);
}
@@ -310,14 +328,6 @@
AddAccessFlags(kAccMustCountLocks);
}
- // Checks to see if the method was annotated with @dalvik.annotation.optimization.FastNative
- // -- Independent of kAccFastNative access flags.
- bool IsAnnotatedWithFastNative();
-
- // Checks to see if the method was annotated with @dalvik.annotation.optimization.CriticalNative
- // -- Unrelated to the GC notion of "critical".
- bool IsAnnotatedWithCriticalNative();
-
// Returns true if this method could be overridden by a default method.
bool IsOverridableByDefaultMethod() REQUIRES_SHARED(Locks::mutator_lock_);
@@ -417,7 +427,7 @@
// Registers the native method and returns the new entry point. NB The returned entry point might
// be different from the native_method argument if some MethodCallback modifies it.
- const void* RegisterNative(const void* native_method, bool is_fast)
+ const void* RegisterNative(const void* native_method)
REQUIRES_SHARED(Locks::mutator_lock_) WARN_UNUSED;
void UnregisterNative() REQUIRES_SHARED(Locks::mutator_lock_);
@@ -452,7 +462,7 @@
// where the declaring class is treated as a weak reference (accessing it with
// a read barrier would either prevent unloading the class, or crash the runtime if
// the GC wants to unload it).
- DCHECK(!IsNative<kWithoutReadBarrier>());
+ DCHECK(!IsNative());
if (UNLIKELY(IsProxyMethod())) {
return nullptr;
}
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index bd5e184..6f4b9a5 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -3340,6 +3340,11 @@
}
}
}
+ if (UNLIKELY((access_flags & kAccNative) != 0u)) {
+ // Check if the native method is annotated with @FastNative or @CriticalNative.
+ access_flags |= annotations::GetNativeMethodAnnotationAccessFlags(
+ dex_file, dst->GetClassDef(), dex_method_idx);
+ }
dst->SetAccessFlags(access_flags);
}
@@ -7048,6 +7053,7 @@
// verified yet it shouldn't have methods that are skipping access checks.
// TODO This is rather arbitrary. We should maybe support classes where only some of its
// methods are skip_access_checks.
+ DCHECK_EQ(new_method.GetAccessFlags() & kAccNative, 0u);
constexpr uint32_t kSetFlags = kAccDefault | kAccCopied;
constexpr uint32_t kMaskFlags = ~kAccSkipAccessChecks;
new_method.SetAccessFlags((new_method.GetAccessFlags() | kSetFlags) & kMaskFlags);
@@ -7070,6 +7076,7 @@
// mark this as a default, non-abstract method, since thats what it is. Also clear the
// kAccSkipAccessChecks bit since this class hasn't been verified yet it shouldn't have
// methods that are skipping access checks.
+ DCHECK_EQ(new_method.GetAccessFlags() & kAccNative, 0u);
constexpr uint32_t kSetFlags = kAccDefault | kAccDefaultConflict | kAccCopied;
constexpr uint32_t kMaskFlags = ~(kAccAbstract | kAccSkipAccessChecks);
new_method.SetAccessFlags((new_method.GetAccessFlags() | kSetFlags) & kMaskFlags);
diff --git a/runtime/dex_file_annotations.cc b/runtime/dex_file_annotations.cc
index 5496efd..27060ae 100644
--- a/runtime/dex_file_annotations.cc
+++ b/runtime/dex_file_annotations.cc
@@ -1239,8 +1239,11 @@
ScopedObjectAccess soa(Thread::Current());
ObjPtr<mirror::Class> klass = soa.Decode<mirror::Class>(cls);
ClassLinker* linker = Runtime::Current()->GetClassLinker();
- // Lookup using the boot class path loader should yield the annotation class.
- CHECK_EQ(klass, linker->LookupClass(soa.Self(), descriptor, /* class_loader */ nullptr));
+ // WellKnownClasses may not be initialized yet, so `klass` may be null.
+ if (klass != nullptr) {
+ // Lookup using the boot class path loader should yield the annotation class.
+ CHECK_EQ(klass, linker->LookupClass(soa.Self(), descriptor, /* class_loader */ nullptr));
+ }
}
}
@@ -1266,30 +1269,31 @@
return false;
}
-uint32_t HasFastNativeMethodBuildAnnotation(const DexFile& dex_file,
- const DexFile::ClassDef& class_def,
- uint32_t method_index) {
+uint32_t GetNativeMethodAnnotationAccessFlags(const DexFile& dex_file,
+ const DexFile::ClassDef& class_def,
+ uint32_t method_index) {
const DexFile::AnnotationSetItem* annotation_set =
FindAnnotationSetForMethod(dex_file, class_def, method_index);
- return annotation_set != nullptr &&
- IsMethodBuildAnnotationPresent(
- dex_file,
- *annotation_set,
- "Ldalvik/annotation/optimization/FastNative;",
- WellKnownClasses::dalvik_annotation_optimization_FastNative);
-}
-
-uint32_t HasCriticalNativeMethodBuildAnnotation(const DexFile& dex_file,
- const DexFile::ClassDef& class_def,
- uint32_t method_index) {
- const DexFile::AnnotationSetItem* annotation_set =
- FindAnnotationSetForMethod(dex_file, class_def, method_index);
- return annotation_set != nullptr &&
- IsMethodBuildAnnotationPresent(
- dex_file,
- *annotation_set,
- "Ldalvik/annotation/optimization/CriticalNative;",
- WellKnownClasses::dalvik_annotation_optimization_CriticalNative);
+ if (annotation_set == nullptr) {
+ return 0u;
+ }
+ uint32_t access_flags = 0u;
+ if (IsMethodBuildAnnotationPresent(
+ dex_file,
+ *annotation_set,
+ "Ldalvik/annotation/optimization/FastNative;",
+ WellKnownClasses::dalvik_annotation_optimization_FastNative)) {
+ access_flags |= kAccFastNative;
+ }
+ if (IsMethodBuildAnnotationPresent(
+ dex_file,
+ *annotation_set,
+ "Ldalvik/annotation/optimization/CriticalNative;",
+ WellKnownClasses::dalvik_annotation_optimization_CriticalNative)) {
+ access_flags |= kAccCriticalNative;
+ }
+ CHECK_NE(access_flags, kAccFastNative | kAccCriticalNative);
+ return access_flags;
}
mirror::Object* GetAnnotationForClass(Handle<mirror::Class> klass,
diff --git a/runtime/dex_file_annotations.h b/runtime/dex_file_annotations.h
index 04ff3a1..243f30f 100644
--- a/runtime/dex_file_annotations.h
+++ b/runtime/dex_file_annotations.h
@@ -75,15 +75,12 @@
uint32_t visibility = DexFile::kDexVisibilityRuntime)
REQUIRES_SHARED(Locks::mutator_lock_);
// Check whether a method from the `dex_file` with the given `method_index`
-// is annotated with @dalvik.annotation.optimization.FastNative with build visibility.
-uint32_t HasFastNativeMethodBuildAnnotation(const DexFile& dex_file,
- const DexFile::ClassDef& class_def,
- uint32_t method_index);
-// Check whether a method from the `dex_file` with the given `method_index`
-// is annotated with @dalvik.annotation.optimization.CriticalNative with build visibility.
-uint32_t HasCriticalNativeMethodBuildAnnotation(const DexFile& dex_file,
- const DexFile::ClassDef& class_def,
- uint32_t method_index);
+// is annotated with @dalvik.annotation.optimization.FastNative or
+// @dalvik.annotation.optimization.CriticalNative with build visibility.
+// If yes, return the associated access flags, i.e. kAccFastNative or kAccCriticalNative.
+uint32_t GetNativeMethodAnnotationAccessFlags(const DexFile& dex_file,
+ const DexFile::ClassDef& class_def,
+ uint32_t method_index);
// Class annotations.
mirror::Object* GetAnnotationForClass(Handle<mirror::Class> klass,
diff --git a/runtime/entrypoints/jni/jni_entrypoints.cc b/runtime/entrypoints/jni/jni_entrypoints.cc
index dd0819e..7ec360a 100644
--- a/runtime/entrypoints/jni/jni_entrypoints.cc
+++ b/runtime/entrypoints/jni/jni_entrypoints.cc
@@ -46,7 +46,7 @@
return nullptr;
}
// Register so that future calls don't come here
- return method->RegisterNative(native_code, false);
+ return method->RegisterNative(native_code);
}
} // namespace art
diff --git a/runtime/entrypoints/quick/quick_jni_entrypoints.cc b/runtime/entrypoints/quick/quick_jni_entrypoints.cc
index a8d2a34..29a62c8 100644
--- a/runtime/entrypoints/quick/quick_jni_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_jni_entrypoints.cc
@@ -28,10 +28,7 @@
static_assert(sizeof(IRTSegmentState) == sizeof(uint32_t), "IRTSegmentState size unexpected");
static_assert(std::is_trivial<IRTSegmentState>::value, "IRTSegmentState not trivial");
-static bool kEnableAnnotationChecks = RegisterRuntimeDebugFlag(&kEnableAnnotationChecks);
-
-template <bool kDynamicFast>
-static inline void GoToRunnableFast(Thread* self) NO_THREAD_SAFETY_ANALYSIS;
+static inline void GoToRunnableFast(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);
extern void ReadBarrierJni(mirror::CompressedReference<mirror::Object>* handle_on_stack,
Thread* self ATTRIBUTE_UNUSED) {
@@ -56,9 +53,9 @@
uint32_t saved_local_ref_cookie = bit_cast<uint32_t>(env->local_ref_cookie);
env->local_ref_cookie = env->locals.GetSegmentState();
- if (kIsDebugBuild && kEnableAnnotationChecks) {
+ if (kIsDebugBuild) {
ArtMethod* native_method = *self->GetManagedStack()->GetTopQuickFrame();
- CHECK(native_method->IsAnnotatedWithFastNative()) << native_method->PrettyMethod();
+ CHECK(native_method->IsFastNative()) << native_method->PrettyMethod();
}
return saved_local_ref_cookie;
@@ -71,6 +68,9 @@
uint32_t saved_local_ref_cookie = bit_cast<uint32_t>(env->local_ref_cookie);
env->local_ref_cookie = env->locals.GetSegmentState();
ArtMethod* native_method = *self->GetManagedStack()->GetTopQuickFrame();
+ // TODO: Introduce special entrypoint for synchronized @FastNative methods?
+ // Or ban synchronized @FastNative outright to avoid the extra check here?
+ DCHECK(!native_method->IsFastNative() || native_method->IsSynchronized());
if (!native_method->IsFastNative()) {
// When not fast JNI we transition out of runnable.
self->TransitionFromRunnableToSuspended(kNative);
@@ -90,25 +90,18 @@
if (!is_fast) {
self->TransitionFromSuspendedToRunnable();
} else {
- GoToRunnableFast</*kDynamicFast*/true>(self);
+ GoToRunnableFast(self);
}
}
-// TODO: NO_THREAD_SAFETY_ANALYSIS due to different control paths depending on fast JNI.
-template <bool kDynamicFast>
-ALWAYS_INLINE static inline void GoToRunnableFast(Thread* self) NO_THREAD_SAFETY_ANALYSIS {
- if (kIsDebugBuild && kEnableAnnotationChecks) {
- // Should only enter here if the method is !Fast JNI or @FastNative.
+ALWAYS_INLINE static inline void GoToRunnableFast(Thread* self) {
+ if (kIsDebugBuild) {
+ // Should only enter here if the method is @FastNative.
ArtMethod* native_method = *self->GetManagedStack()->GetTopQuickFrame();
-
- if (kDynamicFast) {
- CHECK(native_method->IsFastNative()) << native_method->PrettyMethod();
- } else {
- CHECK(native_method->IsAnnotatedWithFastNative()) << native_method->PrettyMethod();
- }
+ CHECK(native_method->IsFastNative()) << native_method->PrettyMethod();
}
- // When we are in "fast" JNI or @FastNative, we are already Runnable.
+ // When we are in @FastNative, we are already Runnable.
// Only do a suspend check on the way out of JNI.
if (UNLIKELY(self->TestAllFlags())) {
// In fast JNI mode we never transitioned out of runnable. Perform a suspend check if there
@@ -138,7 +131,7 @@
}
extern void JniMethodFastEnd(uint32_t saved_local_ref_cookie, Thread* self) {
- GoToRunnableFast</*kDynamicFast*/false>(self);
+ GoToRunnableFast(self);
PopLocalReferences(saved_local_ref_cookie, self);
}
@@ -175,7 +168,7 @@
extern mirror::Object* JniMethodFastEndWithReference(jobject result,
uint32_t saved_local_ref_cookie,
Thread* self) {
- GoToRunnableFast</*kDynamicFast*/false>(self);
+ GoToRunnableFast(self);
return JniMethodEndWithReferenceHandleResult(result, saved_local_ref_cookie, self);
}
@@ -203,8 +196,8 @@
HandleScope* handle_scope)
// TODO: NO_THREAD_SAFETY_ANALYSIS as GoToRunnable() is NO_THREAD_SAFETY_ANALYSIS
NO_THREAD_SAFETY_ANALYSIS {
- bool critical_native = called->IsAnnotatedWithCriticalNative();
- bool fast_native = called->IsAnnotatedWithFastNative();
+ bool critical_native = called->IsCriticalNative();
+ bool fast_native = called->IsFastNative();
bool normal_native = !critical_native && !fast_native;
// @Fast and @CriticalNative do not do a state transition.
diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
index a4a8c34..127b5d7 100644
--- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
@@ -2171,32 +2171,14 @@
*/
extern "C" TwoWordReturn artQuickGenericJniTrampoline(Thread* self, ArtMethod** sp)
REQUIRES_SHARED(Locks::mutator_lock_) {
+ // Note: We cannot walk the stack properly until fixed up below.
ArtMethod* called = *sp;
DCHECK(called->IsNative()) << called->PrettyMethod(true);
- // Fix up a callee-save frame at the bottom of the stack (at `*sp`,
- // above the alloca region) while we check for optimization
- // annotations, thus allowing stack walking until the completion of
- // the JNI frame creation.
- //
- // Note however that the Generic JNI trampoline does not expect
- // exception being thrown at that stage.
- *sp = Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs);
- self->SetTopOfStack(sp);
uint32_t shorty_len = 0;
const char* shorty = called->GetShorty(&shorty_len);
- // Optimization annotations lookup does not try to resolve classes,
- // as this may throw an exception, which is not supported by the
- // Generic JNI trampoline at this stage; instead, method's
- // annotations' classes are looked up in the bootstrap class
- // loader's resolved types (which won't trigger an exception).
- CHECK(!self->IsExceptionPending());
- bool critical_native = called->IsAnnotatedWithCriticalNative();
- CHECK(!self->IsExceptionPending());
- bool fast_native = called->IsAnnotatedWithFastNative();
- CHECK(!self->IsExceptionPending());
+ bool critical_native = called->IsCriticalNative();
+ bool fast_native = called->IsFastNative();
bool normal_native = !critical_native && !fast_native;
- // Restore the initial ArtMethod pointer at `*sp`.
- *sp = called;
// Run the visitor and update sp.
BuildGenericJniFrameVisitor visitor(self,
@@ -2212,7 +2194,7 @@
visitor.FinalizeHandleScope(self);
}
- // Fix up managed-stack things in Thread.
+ // Fix up managed-stack things in Thread. After this we can walk the stack.
self->SetTopOfStack(sp);
self->VerifyStack();
diff --git a/runtime/image.cc b/runtime/image.cc
index cf5feac..8f35d84 100644
--- a/runtime/image.cc
+++ b/runtime/image.cc
@@ -26,7 +26,7 @@
namespace art {
const uint8_t ImageHeader::kImageMagic[] = { 'a', 'r', 't', '\n' };
-const uint8_t ImageHeader::kImageVersion[] = { '0', '5', '0', '\0' }; // strcmp() @FastNative.
+const uint8_t ImageHeader::kImageVersion[] = { '0', '5', '1', '\0' }; // @FastNative access flags.
ImageHeader::ImageHeader(uint32_t image_begin,
uint32_t image_size,
diff --git a/runtime/jni_internal.cc b/runtime/jni_internal.cc
index 5164c85..1e55158 100644
--- a/runtime/jni_internal.cc
+++ b/runtime/jni_internal.cc
@@ -2364,7 +2364,7 @@
// TODO: make this a hard register error in the future.
}
- const void* final_function_ptr = m->RegisterNative(fnPtr, is_fast);
+ const void* final_function_ptr = m->RegisterNative(fnPtr);
UNUSED(final_function_ptr);
}
return JNI_OK;
diff --git a/runtime/mirror/class.cc b/runtime/mirror/class.cc
index 4d810db..892c039 100644
--- a/runtime/mirror/class.cc
+++ b/runtime/mirror/class.cc
@@ -1244,7 +1244,6 @@
// still return a synthetic method to handle situations like
// escalated visibility. We never return miranda methods that
// were synthesized by the runtime.
- constexpr uint32_t kSkipModifiers = kAccMiranda | kAccSynthetic;
StackHandleScope<3> hs(self);
auto h_method_name = hs.NewHandle(name);
if (UNLIKELY(h_method_name == nullptr)) {
@@ -1264,11 +1263,10 @@
}
continue;
}
- auto modifiers = m.GetAccessFlags();
- if ((modifiers & kSkipModifiers) == 0) {
- return Method::CreateFromArtMethod<kPointerSize, kTransactionActive>(self, &m);
- }
- if ((modifiers & kAccMiranda) == 0) {
+ if (!m.IsMiranda()) {
+ if (!m.IsSynthetic()) {
+ return Method::CreateFromArtMethod<kPointerSize, kTransactionActive>(self, &m);
+ }
result = &m; // Remember as potential result if it's not a miranda method.
}
}
@@ -1291,11 +1289,11 @@
}
continue;
}
- if ((modifiers & kSkipModifiers) == 0) {
+ DCHECK(!m.IsMiranda()); // Direct methods cannot be miranda methods.
+ if ((modifiers & kAccSynthetic) == 0) {
return Method::CreateFromArtMethod<kPointerSize, kTransactionActive>(self, &m);
}
- // Direct methods cannot be miranda methods, so this potential result must be synthetic.
- result = &m;
+ result = &m; // Remember as potential result.
}
}
return result != nullptr
diff --git a/runtime/mirror/class.h b/runtime/mirror/class.h
index bf49f51..c545a9b 100644
--- a/runtime/mirror/class.h
+++ b/runtime/mirror/class.h
@@ -286,7 +286,7 @@
// This does not necessarily mean that access checks are avoidable,
// since the class methods might still need to be run with access checks.
bool WasVerificationAttempted() REQUIRES_SHARED(Locks::mutator_lock_) {
- return (GetAccessFlags() & kAccSkipAccessChecks) != 0;
+ return (GetAccessFlags() & kAccVerificationAttempted) != 0;
}
// Mark the class as having gone through a verification attempt.
diff --git a/runtime/modifiers.h b/runtime/modifiers.h
index 4b790a0..d7d647b 100644
--- a/runtime/modifiers.h
+++ b/runtime/modifiers.h
@@ -49,17 +49,21 @@
// declaring class. This flag may only be applied to methods.
static constexpr uint32_t kAccObsoleteMethod = 0x00040000; // method (runtime)
// Used by a method to denote that its execution does not need to go through slow path interpreter.
-static constexpr uint32_t kAccSkipAccessChecks = 0x00080000; // method (dex only)
+static constexpr uint32_t kAccSkipAccessChecks = 0x00080000; // method (runtime, not native)
// Used by a class to denote that the verifier has attempted to check it at least once.
static constexpr uint32_t kAccVerificationAttempted = 0x00080000; // class (runtime)
-static constexpr uint32_t kAccFastNative = 0x00080000; // method (dex only)
// This is set by the class linker during LinkInterfaceMethods. It is used by a method to represent
// that it was copied from its declaring class into another class. All methods marked kAccMiranda
// and kAccDefaultConflict will have this bit set. Any kAccDefault method contained in the methods_
// array of a concrete class will also have this bit set.
static constexpr uint32_t kAccCopied = 0x00100000; // method (runtime)
-static constexpr uint32_t kAccMiranda = 0x00200000; // method (dex only)
+static constexpr uint32_t kAccMiranda = 0x00200000; // method (runtime, not native)
static constexpr uint32_t kAccDefault = 0x00400000; // method (runtime)
+// Native method flags are set when linking the methods based on the presence of the
+// @dalvik.annotation.optimization.{Fast,Critical}Native annotations with build visibility.
+// Reuse the values of kAccSkipAccessChecks and kAccMiranda which are not used for native methods.
+static constexpr uint32_t kAccFastNative = 0x00080000; // method (runtime; native only)
+static constexpr uint32_t kAccCriticalNative = 0x00200000; // method (runtime; native only)
// Set by the JIT when clearing profiling infos to denote that a method was previously warm.
static constexpr uint32_t kAccPreviouslyWarm = 0x00800000; // method (runtime)
@@ -106,8 +110,9 @@
// Valid (meaningful) bits for a method.
static constexpr uint32_t kAccValidMethodFlags = kAccPublic | kAccPrivate | kAccProtected |
kAccStatic | kAccFinal | kAccSynchronized | kAccBridge | kAccVarargs | kAccNative |
- kAccAbstract | kAccStrict | kAccSynthetic | kAccMiranda | kAccConstructor |
- kAccDeclaredSynchronized | kAccPreviouslyWarm;
+ kAccAbstract | kAccStrict | kAccSynthetic | kAccConstructor | kAccDeclaredSynchronized;
+static_assert(((kAccIntrinsic | kAccIntrinsicBits) & kAccValidMethodFlags) == 0,
+ "Intrinsic bits and valid dex file method access flags must not overlap.");
// Valid (meaningful) bits for a class (not interface).
// Note 1. These are positive bits. Other bits may have to be zero.
diff --git a/runtime/native/scoped_fast_native_object_access-inl.h b/runtime/native/scoped_fast_native_object_access-inl.h
index b2abc46..20ff76e 100644
--- a/runtime/native/scoped_fast_native_object_access-inl.h
+++ b/runtime/native/scoped_fast_native_object_access-inl.h
@@ -27,7 +27,7 @@
inline ScopedFastNativeObjectAccess::ScopedFastNativeObjectAccess(JNIEnv* env)
: ScopedObjectAccessAlreadyRunnable(env) {
Locks::mutator_lock_->AssertSharedHeld(Self());
- DCHECK((*Self()->GetManagedStack()->GetTopQuickFrame())->IsAnnotatedWithFastNative());
+ DCHECK((*Self()->GetManagedStack()->GetTopQuickFrame())->IsFastNative());
// Don't work with raw objects in non-runnable states.
DCHECK_EQ(Self()->GetState(), kRunnable);
}