 build/Android.bp                                          |   5
 build/art.go                                              |  30
 compiler/jni/jni_compiler_test.cc                         |  34
 runtime/entrypoints/quick/quick_jni_entrypoints.cc        |  15
 runtime/entrypoints/quick/quick_trampoline_entrypoints.cc | 121
 test/Android.run-test.mk                                  |   6
 6 files changed, 148 insertions(+), 63 deletions(-)
diff --git a/build/Android.bp b/build/Android.bp
index 4be43ec439..9156027dee 100644
--- a/build/Android.bp
+++ b/build/Android.bp
@@ -22,8 +22,6 @@ art_global_defaults {
name: "art_defaults",
clang: true,
cflags: [
- "-O3",
-
// Base set of cflags used by all things ART.
"-fno-rtti",
"-ggdb3",
@@ -149,10 +147,9 @@ art_global_defaults {
],
}
-cc_defaults {
+art_debug_defaults {
name: "art_debug_defaults",
cflags: [
- "-O2",
"-DDYNAMIC_ANNOTATIONS_ENABLED=1",
"-DVIXL_DEBUG",
"-UNDEBUG",
diff --git a/build/art.go b/build/art.go
index ba5521a9ae..0ae6c8fa58 100644
--- a/build/art.go
+++ b/build/art.go
@@ -30,6 +30,9 @@ func globalFlags(ctx android.BaseContext) ([]string, []string) {
var cflags []string
var asflags []string
+ opt := envDefault(ctx, "ART_NDEBUG_OPT_FLAG", "-O3")
+ cflags = append(cflags, opt)
+
tlab := false
gcType := envDefault(ctx, "ART_DEFAULT_GC_TYPE", "CMS")
@@ -75,6 +78,15 @@ func globalFlags(ctx android.BaseContext) ([]string, []string) {
return cflags, asflags
}
+func debugFlags(ctx android.BaseContext) []string {
+ var cflags []string
+
+ opt := envDefault(ctx, "ART_DEBUG_OPT_FLAG", "-O2")
+ cflags = append(cflags, opt)
+
+ return cflags
+}
+
func deviceFlags(ctx android.BaseContext) []string {
var cflags []string
deviceFrameSizeLimit := 1736
@@ -143,6 +155,16 @@ func globalDefaults(ctx android.LoadHookContext) {
ctx.AppendProperties(p)
}
+func debugDefaults(ctx android.LoadHookContext) {
+ type props struct {
+ Cflags []string
+ }
+
+ p := &props{}
+ p.Cflags = debugFlags(ctx)
+ ctx.AppendProperties(p)
+}
+
func customLinker(ctx android.LoadHookContext) {
linker := envDefault(ctx, "CUSTOM_TARGET_LINKER", "")
if linker != "" {
@@ -206,6 +228,7 @@ func init() {
soong.RegisterModuleType("art_cc_test_library", artTestLibrary)
soong.RegisterModuleType("art_cc_defaults", artDefaultsFactory)
soong.RegisterModuleType("art_global_defaults", artGlobalDefaultsFactory)
+ soong.RegisterModuleType("art_debug_defaults", artDebugDefaultsFactory)
}
func artGlobalDefaultsFactory() (blueprint.Module, []interface{}) {
@@ -215,6 +238,13 @@ func artGlobalDefaultsFactory() (blueprint.Module, []interface{}) {
return module, props
}
+func artDebugDefaultsFactory() (blueprint.Module, []interface{}) {
+ module, props := artDefaultsFactory()
+ android.AddLoadHook(module, debugDefaults)
+
+ return module, props
+}
+
func artDefaultsFactory() (blueprint.Module, []interface{}) {
c := &codegenProperties{}
module, props := cc.DefaultsFactory(c)
diff --git a/compiler/jni/jni_compiler_test.cc b/compiler/jni/jni_compiler_test.cc
index cdd4c68470..b692c6d9ad 100644
--- a/compiler/jni/jni_compiler_test.cc
+++ b/compiler/jni/jni_compiler_test.cc
@@ -391,12 +391,12 @@ jobject JniCompilerTest::class_loader_;
// 3) synchronized keyword
// -- TODO: We can support (1) if we remove the mutator lock assert during stub lookup.
# define JNI_TEST_NORMAL_ONLY(TestName) \
- TEST_F(JniCompilerTest, TestName ## Default) { \
+ TEST_F(JniCompilerTest, TestName ## NormalCompiler) { \
SCOPED_TRACE("Normal JNI with compiler"); \
gCurrentJni = static_cast<uint32_t>(JniKind::kNormal); \
TestName ## Impl(); \
} \
- TEST_F(JniCompilerTest, TestName ## Generic) { \
+ TEST_F(JniCompilerTest, TestName ## NormalGeneric) { \
SCOPED_TRACE("Normal JNI with generic"); \
gCurrentJni = static_cast<uint32_t>(JniKind::kNormal); \
TEST_DISABLED_FOR_MIPS(); \
@@ -404,46 +404,41 @@ jobject JniCompilerTest::class_loader_;
TestName ## Impl(); \
}
-// Test normal compiler, @FastNative compiler, and normal/@FastNative generic for normal natives.
+// Test (normal, @FastNative) x (compiler, generic).
#define JNI_TEST(TestName) \
JNI_TEST_NORMAL_ONLY(TestName) \
- TEST_F(JniCompilerTest, TestName ## Fast) { \
+ TEST_F(JniCompilerTest, TestName ## FastCompiler) { \
SCOPED_TRACE("@FastNative JNI with compiler"); \
gCurrentJni = static_cast<uint32_t>(JniKind::kFast); \
TestName ## Impl(); \
} \
- \
-
-// TODO: maybe. @FastNative generic JNI support?
-#if 0
+ \
TEST_F(JniCompilerTest, TestName ## FastGeneric) { \
+ SCOPED_TRACE("@FastNative JNI with generic"); \
gCurrentJni = static_cast<uint32_t>(JniKind::kFast); \
TEST_DISABLED_FOR_MIPS(); \
SetCheckGenericJni(true); \
TestName ## Impl(); \
}
-#endif
+// Test (@CriticalNative) x (compiler, generic) only.
#define JNI_TEST_CRITICAL_ONLY(TestName) \
- TEST_F(JniCompilerTest, TestName ## DefaultCritical) { \
+ TEST_F(JniCompilerTest, TestName ## CriticalCompiler) { \
SCOPED_TRACE("@CriticalNative JNI with compiler"); \
gCurrentJni = static_cast<uint32_t>(JniKind::kCritical); \
TestName ## Impl(); \
+ } \
+ TEST_F(JniCompilerTest, TestName ## CriticalGeneric) { \
+ SCOPED_TRACE("@CriticalNative JNI with generic"); \
+ gCurrentJni = static_cast<uint32_t>(JniKind::kCritical); \
+ TestName ## Impl(); \
}
-// Test everything above and also the @CriticalNative compiler, and @CriticalNative generic JNI.
+// Test everything: (normal, @FastNative, @CriticalNative) x (compiler, generic).
#define JNI_TEST_CRITICAL(TestName) \
JNI_TEST(TestName) \
JNI_TEST_CRITICAL_ONLY(TestName) \
-// TODO: maybe, more likely since calling convention changed. @Criticalnative generic JNI support?
-#if 0
- TEST_F(JniCompilerTest, TestName ## GenericCritical) { \
- gCurrentJni = static_cast<uint32_t>(JniKind::kCritical); \
- TestName ## Impl(); \
- }
-#endif
-
static void expectValidThreadState() {
// Normal JNI always transitions to "Native". Other JNIs stay in the "Runnable" state.
if (IsCurrentJniNormal()) {
@@ -506,6 +501,7 @@ static void expectValidJniEnvAndClass(JNIEnv* env, jclass kls) {
// Temporarily disable the EXPECT_NUM_STACK_REFERENCES check (for a single test).
struct ScopedDisableCheckNumStackReferences {
ScopedDisableCheckNumStackReferences() {
+ CHECK(sCheckNumStackReferences); // No nested support.
sCheckNumStackReferences = false;
}
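
For orientation, here is a hand-expanded sketch of the reworked macros ("Foo" is a placeholder name, not a test in this file; each macro still requires a matching Foo ## Impl() body to exist):

    // JNI_TEST_CRITICAL(Foo) = JNI_TEST(Foo) + JNI_TEST_CRITICAL_ONLY(Foo),
    // which now generates six gtest cases, one per (JNI kind x stub) pair:
    //
    //   TEST_F(JniCompilerTest, FooNormalCompiler)    // kNormal,   JNI compiler stub
    //   TEST_F(JniCompilerTest, FooNormalGeneric)     // kNormal,   generic JNI trampoline
    //   TEST_F(JniCompilerTest, FooFastCompiler)      // kFast,     JNI compiler stub
    //   TEST_F(JniCompilerTest, FooFastGeneric)       // kFast,     generic JNI (newly enabled)
    //   TEST_F(JniCompilerTest, FooCriticalCompiler)  // kCritical, JNI compiler stub
    //   TEST_F(JniCompilerTest, FooCriticalGeneric)   // kCritical, generic JNI (newly enabled)
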
diff --git a/runtime/entrypoints/quick/quick_jni_entrypoints.cc b/runtime/entrypoints/quick/quick_jni_entrypoints.cc
index 76b545652d..446e3431a9 100644
--- a/runtime/entrypoints/quick/quick_jni_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_jni_entrypoints.cc
@@ -169,22 +169,33 @@ extern uint64_t GenericJniMethodEnd(Thread* self,
HandleScope* handle_scope)
// TODO: NO_THREAD_SAFETY_ANALYSIS as GoToRunnable() is NO_THREAD_SAFETY_ANALYSIS
NO_THREAD_SAFETY_ANALYSIS {
- GoToRunnable(self);
+ bool critical_native = called->IsAnnotatedWithCriticalNative();
+ bool fast_native = called->IsAnnotatedWithFastNative();
+ bool normal_native = !critical_native && !fast_native;
+
+ // @FastNative and @CriticalNative stay in the Runnable state, so there is no transition back.
+ if (LIKELY(normal_native)) {
+ GoToRunnable(self);
+ }
// We need the mutator lock (i.e., calling GoToRunnable()) before accessing the shorty or the
// locked object.
jobject locked = called->IsSynchronized() ? handle_scope->GetHandle(0).ToJObject() : nullptr;
char return_shorty_char = called->GetShorty()[0];
if (return_shorty_char == 'L') {
if (locked != nullptr) {
+ DCHECK(normal_native) << " synchronized methods are not supported with @FastNative";
UnlockJniSynchronizedMethod(locked, self);
}
return reinterpret_cast<uint64_t>(JniMethodEndWithReferenceHandleResult(
result.l, saved_local_ref_cookie, self));
} else {
if (locked != nullptr) {
+ DCHECK(normal_native) << " synchronized methods are not supported with @FastNative";
UnlockJniSynchronizedMethod(locked, self); // Must decode before pop.
}
- PopLocalReferences(saved_local_ref_cookie, self);
+ if (LIKELY(!critical_native)) {
+ PopLocalReferences(saved_local_ref_cookie, self);
+ }
switch (return_shorty_char) {
case 'F': {
if (kRuntimeISA == kX86) {
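
The new branches are easier to see side by side. Below is a small self-contained sketch (all names are illustrative, not runtime API) of the per-kind teardown policy GenericJniMethodEnd() now encodes: only normal JNI transitioned to the "Native" state on entry, so only it calls GoToRunnable(), and @CriticalNative never pushed a local reference cookie, so it skips PopLocalReferences():

    enum class JniKind { kNormal, kFast, kCritical };

    // Hypothetical names; a sketch of the rules, not the runtime's types.
    struct TeardownPolicy {
      bool go_to_runnable;        // GoToRunnable(self)
      bool pop_local_references;  // PopLocalReferences(cookie, self)
      bool may_unlock_monitor;    // synchronized is only valid for normal JNI
    };

    constexpr TeardownPolicy PolicyFor(JniKind kind) {
      return TeardownPolicy{kind == JniKind::kNormal,    // state transition
                            kind != JniKind::kCritical,  // local ref cookie
                            kind == JniKind::kNormal};   // monitor unlock
    }

    static_assert(PolicyFor(JniKind::kFast).pop_local_references,
                  "@FastNative still pops its local references");
    static_assert(!PolicyFor(JniKind::kCritical).go_to_runnable,
                  "@CriticalNative never left the Runnable state");
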
diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
index 3c6f807d64..fcbd8341aa 100644
--- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
@@ -1625,7 +1625,8 @@ class ComputeNativeCallFrameSize {
class ComputeGenericJniFrameSize FINAL : public ComputeNativeCallFrameSize {
public:
- ComputeGenericJniFrameSize() : num_handle_scope_references_(0) {}
+ explicit ComputeGenericJniFrameSize(bool critical_native)
+ : num_handle_scope_references_(0), critical_native_(critical_native) {}
// Lays out the callee-save frame. Assumes that the incorrect frame corresponding to RefsAndArgs
// is at *m = sp. Will update to point to the bottom of the save frame.
@@ -1711,6 +1712,7 @@ class ComputeGenericJniFrameSize FINAL : public ComputeNativeCallFrameSize {
private:
uint32_t num_handle_scope_references_;
+ const bool critical_native_;
};
uintptr_t ComputeGenericJniFrameSize::PushHandle(mirror::Object* /* ptr */) {
@@ -1720,6 +1722,11 @@ uintptr_t ComputeGenericJniFrameSize::PushHandle(mirror::Object* /* ptr */) {
void ComputeGenericJniFrameSize::WalkHeader(
BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm) {
+ // The first two parameters (JNIEnv*, jclass) are always excluded for @CriticalNative.
+ if (UNLIKELY(critical_native_)) {
+ return;
+ }
+
// JNIEnv
sm->AdvancePointer(nullptr);
@@ -1778,11 +1785,16 @@ class FillNativeCall {
// of transitioning into native code.
class BuildGenericJniFrameVisitor FINAL : public QuickArgumentVisitor {
public:
- BuildGenericJniFrameVisitor(Thread* self, bool is_static, const char* shorty, uint32_t shorty_len,
+ BuildGenericJniFrameVisitor(Thread* self,
+ bool is_static,
+ bool critical_native,
+ const char* shorty,
+ uint32_t shorty_len,
ArtMethod*** sp)
: QuickArgumentVisitor(*sp, is_static, shorty, shorty_len),
- jni_call_(nullptr, nullptr, nullptr, nullptr), sm_(&jni_call_) {
- ComputeGenericJniFrameSize fsc;
+ jni_call_(nullptr, nullptr, nullptr, nullptr, critical_native),
+ sm_(&jni_call_) {
+ ComputeGenericJniFrameSize fsc(critical_native);
uintptr_t* start_gpr_reg;
uint32_t* start_fpr_reg;
uintptr_t* start_stack_arg;
@@ -1793,11 +1805,14 @@ class BuildGenericJniFrameVisitor FINAL : public QuickArgumentVisitor {
jni_call_.Reset(start_gpr_reg, start_fpr_reg, start_stack_arg, handle_scope_);
- // jni environment is always first argument
- sm_.AdvancePointer(self->GetJniEnv());
+ // The first two parameters (JNIEnv*, jclass) are always excluded for @CriticalNative.
+ if (LIKELY(!critical_native)) {
+ // jni environment is always first argument
+ sm_.AdvancePointer(self->GetJniEnv());
- if (is_static) {
- sm_.AdvanceHandleScope((**sp)->GetDeclaringClass());
+ if (is_static) {
+ sm_.AdvanceHandleScope((**sp)->GetDeclaringClass());
+ } // else "this" reference is already handled by QuickArgumentVisitor.
}
}
@@ -1822,8 +1837,11 @@ class BuildGenericJniFrameVisitor FINAL : public QuickArgumentVisitor {
class FillJniCall FINAL : public FillNativeCall {
public:
FillJniCall(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args,
- HandleScope* handle_scope) : FillNativeCall(gpr_regs, fpr_regs, stack_args),
- handle_scope_(handle_scope), cur_entry_(0) {}
+ HandleScope* handle_scope, bool critical_native)
+ : FillNativeCall(gpr_regs, fpr_regs, stack_args),
+ handle_scope_(handle_scope),
+ cur_entry_(0),
+ critical_native_(critical_native) {}
uintptr_t PushHandle(mirror::Object* ref) OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_);
@@ -1839,12 +1857,17 @@ class BuildGenericJniFrameVisitor FINAL : public QuickArgumentVisitor {
while (cur_entry_ < expected_slots) {
handle_scope_->GetMutableHandle(cur_entry_++).Assign(nullptr);
}
- DCHECK_NE(cur_entry_, 0U);
+
+ if (!critical_native_) {
+ // Non-critical natives have at least the declaring class (jclass) or "this" (jobject).
+ DCHECK_NE(cur_entry_, 0U);
+ }
}
private:
HandleScope* handle_scope_;
size_t cur_entry_;
+ const bool critical_native_;
};
HandleScope* handle_scope_;
@@ -1924,7 +1947,12 @@ extern "C" void* artFindNativeMethod();
extern "C" void* artFindNativeMethod(Thread* self);
#endif
-uint64_t artQuickGenericJniEndJNIRef(Thread* self, uint32_t cookie, jobject l, jobject lock) {
+static uint64_t artQuickGenericJniEndJNIRef(Thread* self,
+ uint32_t cookie,
+ bool fast_native ATTRIBUTE_UNUSED,
+ jobject l,
+ jobject lock) {
+ // TODO: add entrypoints for @FastNative returning objects.
if (lock != nullptr) {
return reinterpret_cast<uint64_t>(JniMethodEndWithReferenceSynchronized(l, cookie, lock, self));
} else {
@@ -1932,11 +1960,19 @@ uint64_t artQuickGenericJniEndJNIRef(Thread* self, uint32_t cookie, jobject l, j
}
}
-void artQuickGenericJniEndJNINonRef(Thread* self, uint32_t cookie, jobject lock) {
+static void artQuickGenericJniEndJNINonRef(Thread* self,
+ uint32_t cookie,
+ bool fast_native,
+ jobject lock) {
if (lock != nullptr) {
JniMethodEndSynchronized(cookie, lock, self);
+ // Ignore "fast_native" here because synchronized functions aren't very fast.
} else {
- JniMethodEnd(cookie, self);
+ if (UNLIKELY(fast_native)) {
+ JniMethodFastEnd(cookie, self);
+ } else {
+ JniMethodEnd(cookie, self);
+ }
}
}
@@ -1958,9 +1994,17 @@ extern "C" TwoWordReturn artQuickGenericJniTrampoline(Thread* self, ArtMethod**
DCHECK(called->IsNative()) << PrettyMethod(called, true);
uint32_t shorty_len = 0;
const char* shorty = called->GetShorty(&shorty_len);
+ bool critical_native = called->IsAnnotatedWithCriticalNative();
+ bool fast_native = called->IsAnnotatedWithFastNative();
+ bool normal_native = !critical_native && !fast_native;
// Run the visitor and update sp.
- BuildGenericJniFrameVisitor visitor(self, called->IsStatic(), shorty, shorty_len, &sp);
+ BuildGenericJniFrameVisitor visitor(self,
+ called->IsStatic(),
+ critical_native,
+ shorty,
+ shorty_len,
+ &sp);
{
ScopedAssertNoThreadSuspension sants(__FUNCTION__);
visitor.VisitArguments();
@@ -1973,20 +2017,30 @@ extern "C" TwoWordReturn artQuickGenericJniTrampoline(Thread* self, ArtMethod**
self->VerifyStack();
- // Start JNI, save the cookie.
uint32_t cookie;
- if (called->IsSynchronized()) {
- cookie = JniMethodStartSynchronized(visitor.GetFirstHandleScopeJObject(), self);
- if (self->IsExceptionPending()) {
- self->PopHandleScope();
- // A negative value denotes an error.
- return GetTwoWordFailureValue();
+ uint32_t* sp32;
+ // Skip calling JniMethodStart for @CriticalNative.
+ if (LIKELY(!critical_native)) {
+ // Start JNI, save the cookie.
+ if (called->IsSynchronized()) {
+ DCHECK(normal_native) << " synchronized methods are not supported with @FastNative";
+ cookie = JniMethodStartSynchronized(visitor.GetFirstHandleScopeJObject(), self);
+ if (self->IsExceptionPending()) {
+ self->PopHandleScope();
+ // A negative value denotes an error.
+ return GetTwoWordFailureValue();
+ }
+ } else {
+ if (fast_native) {
+ cookie = JniMethodFastStart(self);
+ } else {
+ DCHECK(normal_native);
+ cookie = JniMethodStart(self);
+ }
}
- } else {
- cookie = JniMethodStart(self);
+ sp32 = reinterpret_cast<uint32_t*>(sp);
+ *(sp32 - 1) = cookie;
}
- uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
- *(sp32 - 1) = cookie;
// Retrieve the stored native code.
void* nativeCode = called->GetEntryPointFromJni();
@@ -2007,12 +2061,15 @@ extern "C" TwoWordReturn artQuickGenericJniTrampoline(Thread* self, ArtMethod**
if (nativeCode == nullptr) {
DCHECK(self->IsExceptionPending()); // There should be an exception pending now.
- // End JNI, as the assembly will move to deliver the exception.
- jobject lock = called->IsSynchronized() ? visitor.GetFirstHandleScopeJObject() : nullptr;
- if (shorty[0] == 'L') {
- artQuickGenericJniEndJNIRef(self, cookie, nullptr, lock);
- } else {
- artQuickGenericJniEndJNINonRef(self, cookie, lock);
+ // @CriticalNative calls do not need to call back into JniMethodEnd.
+ if (LIKELY(!critical_native)) {
+ // End JNI, as the assembly will move to deliver the exception.
+ jobject lock = called->IsSynchronized() ? visitor.GetFirstHandleScopeJObject() : nullptr;
+ if (shorty[0] == 'L') {
+ artQuickGenericJniEndJNIRef(self, cookie, fast_native, nullptr, lock);
+ } else {
+ artQuickGenericJniEndJNINonRef(self, cookie, fast_native, lock);
+ }
}
return GetTwoWordFailureValue();
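
The frame-size and visitor changes above all follow from the @CriticalNative calling convention: the native function receives neither JNIEnv* nor jclass, so the generic trampoline must not reserve or fill slots for them. A hedged sketch of the native side (class and function names hypothetical), for a Java declaration like "static native int add(int a, int b);":

    #include <jni.h>

    // Normal JNI and @FastNative: JNIEnv* and jclass are still the first two
    // parameters, which the visitor advances past before the real arguments.
    static jint AddNormal(JNIEnv* env, jclass klass, jint a, jint b) {
      return a + b;
    }

    // @CriticalNative: both leading parameters are gone; only primitives remain.
    // This is what WalkHeader() returning early and the constructor skipping
    // AdvancePointer(GetJniEnv()) / AdvanceHandleScope(...) account for.
    static jint AddCritical(jint a, jint b) {
      return a + b;
    }

    // The JNI descriptor string is unchanged either way (binding sketch):
    static const JNINativeMethod kAddMethod = {
        "add", "(II)I", reinterpret_cast<void*>(AddCritical)};
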
diff --git a/test/Android.run-test.mk b/test/Android.run-test.mk
index 3bcea184b3..0497735043 100644
--- a/test/Android.run-test.mk
+++ b/test/Android.run-test.mk
@@ -346,9 +346,7 @@ endif
TEST_ART_BROKEN_NO_RELOCATE_TESTS :=
# Temporarily disable some broken tests when forcing access checks in interpreter b/22414682
-# 004-JniTest is disabled because @CriticalNative is unsupported by generic JNI b/31400248
TEST_ART_BROKEN_INTERPRETER_ACCESS_CHECK_TESTS := \
- 004-JniTest \
137-cfi
ifneq (,$(filter interp-ac,$(COMPILER_TYPES)))
@@ -407,11 +405,9 @@ ART_TEST_KNOWN_BROKEN += $(call all-run-test-names,$(TARGET_TYPES),ndebug,$(PREB
# All these tests check that we have sane behavior if we don't have a patchoat or dex2oat.
# Therefore we shouldn't run them in situations where we actually don't have these since they
# explicitly test for them. These all also assume we have an image.
-# 004-JniTest is disabled because @CriticalNative is unsupported by generic JNI b/31400248
# 147-stripped-dex-fallback is disabled because it requires --prebuild.
# 554-jit-profile-file is disabled because it needs a primary oat file to know what it should save.
TEST_ART_BROKEN_FALLBACK_RUN_TESTS := \
- 004-JniTest \
116-nodex2oat \
117-nopatchoat \
118-noimage-dex2oat \
@@ -485,9 +481,7 @@ TEST_ART_BROKEN_INTERPRETER_RUN_TESTS :=
# Known broken tests for the JIT.
# CFI unwinding expects managed frames, and the test does not iterate enough to even compile. JIT
# also uses Generic JNI instead of the JNI compiler.
-# 004-JniTest is disabled because @CriticalNative is unsupported by generic JNI b/31400248
TEST_ART_BROKEN_JIT_RUN_TESTS := \
- 004-JniTest \
137-cfi
ifneq (,$(filter jit,$(COMPILER_TYPES)))