Reduce inlining in debug builds.
Fixes the 018-stack-overflow test on the host when running with the interpreter.
Change-Id: Ieed091b341b7812cfe898421a74d2f41f6a6a8bc
diff --git a/build/Android.common.mk b/build/Android.common.mk
index 21e829c..f613399 100644
--- a/build/Android.common.mk
+++ b/build/Android.common.mk
@@ -77,6 +77,7 @@
endif
art_debug_cflags := \
+ -fno-inline \
-DDYNAMIC_ANNOTATIONS_ENABLED=1 \
-UNDEBUG
diff --git a/src/base/macros.h b/src/base/macros.h
index 52013da..48cb9c0 100644
--- a/src/base/macros.h
+++ b/src/base/macros.h
@@ -130,6 +130,12 @@
#define LIKELY(x) __builtin_expect((x), true)
#define UNLIKELY(x) __builtin_expect((x), false)
+#ifdef NDEBUG
+#define ALWAYS_INLINE
+#else
+#define ALWAYS_INLINE __attribute__((always_inline))
+#endif
+
// bionic and glibc both have TEMP_FAILURE_RETRY, but Mac OS' libc doesn't.
#ifndef TEMP_FAILURE_RETRY
#define TEMP_FAILURE_RETRY(exp) ({ \
diff --git a/src/base/mutex.h b/src/base/mutex.h
index b530b75..b4e0536 100644
--- a/src/base/mutex.h
+++ b/src/base/mutex.h
@@ -223,14 +223,14 @@
#endif
// Block until ReaderWriterMutex is shared or free then acquire a share on the access.
- void SharedLock(Thread* self) SHARED_LOCK_FUNCTION() __attribute__ ((always_inline));
+ void SharedLock(Thread* self) SHARED_LOCK_FUNCTION() ALWAYS_INLINE;
void ReaderLock(Thread* self) SHARED_LOCK_FUNCTION() { SharedLock(self); }
// Try to acquire share of ReaderWriterMutex.
bool SharedTryLock(Thread* self) EXCLUSIVE_TRYLOCK_FUNCTION(true);
// Release a share of the access.
- void SharedUnlock(Thread* self) UNLOCK_FUNCTION() __attribute__ ((always_inline));
+ void SharedUnlock(Thread* self) UNLOCK_FUNCTION() ALWAYS_INLINE;
void ReaderUnlock(Thread* self) UNLOCK_FUNCTION() { SharedUnlock(self); }
// Is the current thread the exclusive holder of the ReaderWriterMutex.
diff --git a/src/mirror/class.h b/src/mirror/class.h
index 9e440b4..afab314 100644
--- a/src/mirror/class.h
+++ b/src/mirror/class.h
@@ -542,7 +542,7 @@
// super class or interface, return the specific implementation
// method for this class.
AbstractMethod* FindVirtualMethodForInterface(AbstractMethod* method) const
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) __attribute__ ((always_inline, hot));
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE;
AbstractMethod* FindInterfaceMethod(const StringPiece& name, const StringPiece& descriptor) const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
diff --git a/src/scoped_thread_state_change.h b/src/scoped_thread_state_change.h
index 709109a..81db2ec 100644
--- a/src/scoped_thread_state_change.h
+++ b/src/scoped_thread_state_change.h
@@ -30,7 +30,7 @@
class ScopedThreadStateChange {
public:
ScopedThreadStateChange(Thread* self, ThreadState new_thread_state)
- LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) __attribute__ ((always_inline))
+ LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
: self_(self), thread_state_(new_thread_state), expected_has_no_thread_(false) {
if (UNLIKELY(self_ == NULL)) {
// Value chosen arbitrarily and won't be used in the destructor since thread_ == NULL.
@@ -61,7 +61,7 @@
}
}
- ~ScopedThreadStateChange() LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) __attribute__ ((always_inline)) {
+ ~ScopedThreadStateChange() LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE {
if (UNLIKELY(self_ == NULL)) {
if (!expected_has_no_thread_) {
MutexLock mu(NULL, *Locks::runtime_shutdown_lock_);
@@ -120,7 +120,7 @@
class ScopedObjectAccessUnchecked : public ScopedThreadStateChange {
public:
explicit ScopedObjectAccessUnchecked(JNIEnv* env)
- LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) __attribute__ ((always_inline))
+ LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
: ScopedThreadStateChange(ThreadForEnv(env), kRunnable),
env_(reinterpret_cast<JNIEnvExt*>(env)), vm_(env_->vm) {
self_->VerifyStack();
@@ -139,7 +139,8 @@
explicit ScopedObjectAccessUnchecked(JavaVM* vm)
: ScopedThreadStateChange(), env_(NULL), vm_(reinterpret_cast<JavaVMExt*>(vm)) {}
- ~ScopedObjectAccessUnchecked() __attribute__ ((always_inline)) {
+ // Here purely to force inlining.
+ ~ScopedObjectAccessUnchecked() ALWAYS_INLINE {
}
JNIEnvExt* Env() const {
@@ -275,7 +276,7 @@
public:
explicit ScopedObjectAccess(JNIEnv* env)
LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
- SHARED_LOCK_FUNCTION(Locks::mutator_lock_) __attribute__ ((always_inline))
+ SHARED_LOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE
: ScopedObjectAccessUnchecked(env) {
Locks::mutator_lock_->AssertSharedHeld(Self());
}
@@ -287,7 +288,7 @@
Locks::mutator_lock_->AssertSharedHeld(Self());
}
- ~ScopedObjectAccess() UNLOCK_FUNCTION(Locks::mutator_lock_) __attribute__ ((always_inline)) {
+ ~ScopedObjectAccess() UNLOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE {
// Base class will release share of lock. Invoked after this destructor.
}
diff --git a/src/thread.h b/src/thread.h
index 58de45d..7f8bd7e 100644
--- a/src/thread.h
+++ b/src/thread.h
@@ -170,14 +170,14 @@
ThreadState TransitionFromSuspendedToRunnable()
LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
SHARED_LOCK_FUNCTION(Locks::mutator_lock_)
- __attribute__ ((always_inline));
+ ALWAYS_INLINE;
// Transition from runnable into a state where mutator privileges are denied. Releases share of
// mutator lock.
void TransitionFromRunnableToSuspended(ThreadState new_state)
LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
UNLOCK_FUNCTION(Locks::mutator_lock_)
- __attribute__ ((always_inline));
+ ALWAYS_INLINE;
// Wait for a debugger suspension on the thread associated with the given peer. Returns the
// thread on success, else NULL. If the thread should be suspended then request_suspension should