Diffstat (limited to 'src')
-rw-r--r--  src/compiler_llvm/runtime_support_llvm.cc |  8
-rw-r--r--  src/interpreter/interpreter.cc            |  9
-rw-r--r--  src/oat/runtime/support_throw.cc          | 22
-rw-r--r--  src/object.cc                             | 14
-rw-r--r--  src/runtime_support.cc                    | 24
-rw-r--r--  src/runtime_support.h                     |  2
-rw-r--r--  src/thread.h                              | 12
7 files changed, 49 insertions(+), 42 deletions(-)
diff --git a/src/compiler_llvm/runtime_support_llvm.cc b/src/compiler_llvm/runtime_support_llvm.cc
index 28f9335448..371b32a4da 100644
--- a/src/compiler_llvm/runtime_support_llvm.cc
+++ b/src/compiler_llvm/runtime_support_llvm.cc
@@ -150,13 +150,7 @@ void art_throw_null_pointer_exception_from_code(uint32_t dex_pc)
void art_throw_stack_overflow_from_code()
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Thread* thread = art_get_current_thread_from_code();
- if (Runtime::Current()->IsMethodTracingActive()) {
- InstrumentationMethodUnwindFromCode(thread);
- }
- thread->SetStackEndForStackOverflow(); // Allow space on the stack for constructor to execute.
- thread->ThrowNewExceptionF("Ljava/lang/StackOverflowError;", "stack size %s",
- PrettySize(thread->GetStackSize()).c_str());
- thread->ResetDefaultStackEnd(); // Return to default stack size.
+ ThrowStackOverflowError(thread);
}
void art_throw_exception_from_code(Object* exception)
diff --git a/src/interpreter/interpreter.cc b/src/interpreter/interpreter.cc
index d8031c14f4..0a66a21c05 100644
--- a/src/interpreter/interpreter.cc
+++ b/src/interpreter/interpreter.cc
@@ -405,7 +405,9 @@ static void DoInvoke(Thread* self, MethodHelper& mh, ShadowFrame& shadow_frame,
} else {
UnstartedRuntimeInvoke(self, target_method, receiver, arg_array.get(), result);
}
- if (!mh.GetReturnType()->IsPrimitive() && result->GetL() != NULL) {
+ // Check the return type only if the result is non-null. We call GetReturnType
+ // after the null check to avoid type resolution while an exception may be pending.
+ if (result->GetL() != NULL && !mh.GetReturnType()->IsPrimitive()) {
CHECK(mh.GetReturnType()->IsAssignableFrom(result->GetL()->GetClass()));
}
mh.ChangeMethod(shadow_frame.GetMethod());
@@ -1747,6 +1749,11 @@ static JValue Execute(Thread* self, MethodHelper& mh, const DexFile::CodeItem* c
void EnterInterpreterFromInvoke(Thread* self, AbstractMethod* method, Object* receiver,
JValue* args, JValue* result) {
DCHECK_EQ(self, Thread::Current());
+ if (__builtin_frame_address(0) < self->GetStackEnd()) {
+ ThrowStackOverflowError(self);
+ return;
+ }
+
MethodHelper mh(method);
const DexFile::CodeItem* code_item = mh.GetCodeItem();
uint16_t num_regs;
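
The interpreter-entry guard added above depends on the native stack growing toward lower addresses: once the address of the current frame drops below stack_end_ (the low-water mark sitting kStackOverflowReservedBytes above the stack base), the reserved region is about to be consumed, so the error is thrown before the MethodHelper is even constructed. A minimal standalone sketch of that comparison; MockThread and FrameBelowStackEnd are illustrative stand-ins, not ART types:

    // Illustrative stand-in for Thread::GetStackEnd(); not ART's real class.
    struct MockThread {
      char* stack_end;  // lowest address the thread may freely use
    };

    // True when the current native frame has reached the reserved guard region.
    // __builtin_frame_address(0) is a GCC/Clang builtin that returns the address
    // of the current function's stack frame.
    inline bool FrameBelowStackEnd(const MockThread* self) {
      return static_cast<char*>(__builtin_frame_address(0)) < self->stack_end;
    }

EnterInterpreterFromInvoke performs the same comparison against self->GetStackEnd() and returns immediately after ThrowStackOverflowError(self).
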
diff --git a/src/oat/runtime/support_throw.cc b/src/oat/runtime/support_throw.cc
index 420b442e65..21adc52ef5 100644
--- a/src/oat/runtime/support_throw.cc
+++ b/src/oat/runtime/support_throw.cc
@@ -91,27 +91,7 @@ extern "C" void artThrowArrayBoundsFromCode(int index, int limit, Thread* thread
extern "C" void artThrowStackOverflowFromCode(Thread* self, AbstractMethod** sp)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
FinishCalleeSaveFrameSetup(self, sp, Runtime::kSaveAll);
- CHECK(!self->IsHandlingStackOverflow()) << "Recursive stack overflow.";
- // Remove extra entry pushed onto second stack during method tracing.
- if (Runtime::Current()->IsMethodTracingActive()) {
- InstrumentationMethodUnwindFromCode(self);
- }
- self->SetStackEndForStackOverflow(); // Allow space on the stack for constructor to execute.
- JNIEnvExt* env = self->GetJniEnv();
- std::string msg("stack size ");
- msg += PrettySize(self->GetStackSize());
- // Use low-level JNI routine and pre-baked error class to avoid class linking operations that
- // would consume more stack.
- int rc = ::art::ThrowNewException(env, WellKnownClasses::java_lang_StackOverflowError,
- msg.c_str(), NULL);
- if (rc != JNI_OK) {
- // TODO: ThrowNewException failed presumably because of an OOME, we continue to throw the OOME
- // or die in the CHECK below. We may want to throw a pre-baked StackOverflowError
- // instead.
- LOG(ERROR) << "Couldn't throw new StackOverflowError because JNI ThrowNew failed.";
- CHECK(self->IsExceptionPending());
- }
- self->ResetDefaultStackEnd(); // Return to default stack size.
+ ThrowStackOverflowError(self);
self->DeliverException();
}
diff --git a/src/object.cc b/src/object.cc
index dce475f3b3..147b882a50 100644
--- a/src/object.cc
+++ b/src/object.cc
@@ -233,37 +233,37 @@ void Field::SetOffset(MemberOffset num_bytes) {
uint32_t Field::Get32(const Object* object) const {
DCHECK(object != NULL) << PrettyField(this);
- DCHECK(IsStatic() == (object == GetDeclaringClass()) || !Runtime::Current()->IsStarted());
+ DCHECK(!IsStatic() || (object == GetDeclaringClass()) || !Runtime::Current()->IsStarted());
return object->GetField32(GetOffset(), IsVolatile());
}
void Field::Set32(Object* object, uint32_t new_value) const {
DCHECK(object != NULL) << PrettyField(this);
- DCHECK(IsStatic() == (object == GetDeclaringClass()) || !Runtime::Current()->IsStarted());
+ DCHECK(!IsStatic() || (object == GetDeclaringClass()) || !Runtime::Current()->IsStarted());
object->SetField32(GetOffset(), new_value, IsVolatile());
}
uint64_t Field::Get64(const Object* object) const {
DCHECK(object != NULL) << PrettyField(this);
- DCHECK(IsStatic() == (object == GetDeclaringClass()) || !Runtime::Current()->IsStarted());
+ DCHECK(!IsStatic() || (object == GetDeclaringClass()) || !Runtime::Current()->IsStarted());
return object->GetField64(GetOffset(), IsVolatile());
}
void Field::Set64(Object* object, uint64_t new_value) const {
DCHECK(object != NULL) << PrettyField(this);
- DCHECK(IsStatic() == (object == GetDeclaringClass()) || !Runtime::Current()->IsStarted());
+ DCHECK(!IsStatic() || (object == GetDeclaringClass()) || !Runtime::Current()->IsStarted());
object->SetField64(GetOffset(), new_value, IsVolatile());
}
Object* Field::GetObj(const Object* object) const {
DCHECK(object != NULL) << PrettyField(this);
- DCHECK(IsStatic() == (object == GetDeclaringClass()) || !Runtime::Current()->IsStarted());
+ DCHECK(!IsStatic() || (object == GetDeclaringClass()) || !Runtime::Current()->IsStarted());
return object->GetFieldObject<Object*>(GetOffset(), IsVolatile());
}
void Field::SetObj(Object* object, const Object* new_value) const {
DCHECK(object != NULL) << PrettyField(this);
- DCHECK(IsStatic() == (object == GetDeclaringClass()) || !Runtime::Current()->IsStarted());
+ DCHECK(!IsStatic() || (object == GetDeclaringClass()) || !Runtime::Current()->IsStarted());
object->SetFieldObject(GetOffset(), new_value, IsVolatile());
}
@@ -663,7 +663,7 @@ void AbstractMethod::Invoke(Thread* self, Object* receiver, JValue* args, JValue
result->SetJ(0);
}
} else {
- bool interpret = self->ReadFlag(kEnterInterpreter) && !IsNative();
+ bool interpret = self->ReadFlag(kEnterInterpreter) && !IsNative() && !IsProxyMethod();
const bool kLogInvocationStartAndReturn = false;
if (!interpret && GetCode() != NULL && stub != NULL) {
if (kLogInvocationStartAndReturn) {
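
The relaxed DCHECKs above change the receiver invariant from "static if and only if the receiver is the declaring class" to "static implies the receiver is the declaring class", so an instance-field access is no longer rejected merely because its receiver happens to be the declaring class object (the unchanged !Runtime::Current()->IsStarted() escape hatch is omitted below). A hedged truth-table sketch of the two predicates using plain bools rather than ART types:

    #include <cassert>

    // Old predicate: IsStatic() == (object == GetDeclaringClass()).
    inline bool OldCheck(bool is_static, bool receiver_is_declaring_class) {
      return is_static == receiver_is_declaring_class;
    }

    // New predicate: !IsStatic() || (object == GetDeclaringClass()).
    inline bool NewCheck(bool is_static, bool receiver_is_declaring_class) {
      return !is_static || receiver_is_declaring_class;
    }

    int main() {
      // Static access through the declaring class: accepted by both.
      assert(OldCheck(true, true) && NewCheck(true, true));
      // Static access through some other object: rejected by both.
      assert(!OldCheck(true, false) && !NewCheck(true, false));
      // Instance access whose receiver is the declaring class object: the only
      // behavioral difference, previously rejected and now accepted.
      assert(!OldCheck(false, true) && NewCheck(false, true));
      return 0;
    }
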
diff --git a/src/runtime_support.cc b/src/runtime_support.cc
index 7ee1960213..92c5e3ae53 100644
--- a/src/runtime_support.cc
+++ b/src/runtime_support.cc
@@ -343,4 +343,28 @@ Class* ResolveVerifyAndClinit(uint32_t type_idx, const AbstractMethod* referrer,
return klass;
}
+void ThrowStackOverflowError(Thread* self) {
+ CHECK(!self->IsHandlingStackOverflow()) << "Recursive stack overflow.";
+ // Remove extra entry pushed onto second stack during method tracing.
+ if (Runtime::Current()->IsMethodTracingActive()) {
+ InstrumentationMethodUnwindFromCode(self);
+ }
+ self->SetStackEndForStackOverflow(); // Allow space on the stack for constructor to execute.
+ JNIEnvExt* env = self->GetJniEnv();
+ std::string msg("stack size ");
+ msg += PrettySize(self->GetStackSize());
+ // Use low-level JNI routine and pre-baked error class to avoid class linking operations that
+ // would consume more stack.
+ int rc = ::art::ThrowNewException(env, WellKnownClasses::java_lang_StackOverflowError,
+ msg.c_str(), NULL);
+ if (rc != JNI_OK) {
+ // TODO: ThrowNewException failed, presumably because of an OOME; we continue to throw the OOME
+ // or die in the CHECK below. We may want to throw a pre-baked StackOverflowError
+ // instead.
+ LOG(ERROR) << "Couldn't throw new StackOverflowError because JNI ThrowNew failed.";
+ CHECK(self->IsExceptionPending());
+ }
+ self->ResetDefaultStackEnd(); // Return to default stack size.
+}
+
} // namespace art
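
ThrowStackOverflowError() gives the interpreter, the quick entrypoint (artThrowStackOverflowFromCode) and the portable/LLVM entrypoint (art_throw_stack_overflow_from_code) one shared definition of the grow-throw-restore sequence. A standalone mock of that sequence, with Thread, JNIEnvExt and ThrowNewException replaced by illustrative stand-ins:

    #include <cstddef>
    #include <iostream>
    #include <string>

    // Illustrative stand-in for art::Thread; only the members the sequence touches.
    struct MockThread {
      size_t stack_size = 1 << 20;  // pretend total stack size, in bytes
      bool guard_armed = true;      // stands in for the stack_end_ bookkeeping
      void SetStackEndForStackOverflow() { guard_armed = false; }
      void ResetDefaultStackEnd() { guard_armed = true; }
    };

    void ThrowStackOverflowSketch(MockThread* self) {
      // 1. Borrow the reserved bytes so the error can be constructed at all.
      self->SetStackEndForStackOverflow();
      // 2. Build a short message and "throw". The real code uses the low-level JNI
      //    ThrowNewException with the pre-resolved StackOverflowError class to avoid
      //    class-linking work that would itself consume stack.
      std::string msg = "stack size " + std::to_string(self->stack_size) + " bytes";
      std::cerr << "java.lang.StackOverflowError: " << msg << '\n';
      // 3. Re-arm the guard so subsequent frames get the normal limit.
      self->ResetDefaultStackEnd();
    }

As the hunks above show, the quick entrypoint still calls self->DeliverException() afterwards, while the portable path simply returns.
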
diff --git a/src/runtime_support.h b/src/runtime_support.h
index e54e05b16c..d0a6209ca7 100644
--- a/src/runtime_support.h
+++ b/src/runtime_support.h
@@ -231,6 +231,8 @@ extern Class* ResolveVerifyAndClinit(uint32_t type_idx, const AbstractMethod* re
bool can_run_clinit, bool verify_access)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+extern void ThrowStackOverflowError(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
static inline String* ResolveStringFromCode(const AbstractMethod* referrer, uint32_t string_idx)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
diff --git a/src/thread.h b/src/thread.h
index be7c67361f..4e1f0e7a03 100644
--- a/src/thread.h
+++ b/src/thread.h
@@ -106,11 +106,7 @@ enum ThreadFlag {
class PACKED Thread {
public:
// Space to throw a StackOverflowError in.
-#if !defined(ART_USE_LLVM_COMPILER)
- static const size_t kStackOverflowReservedBytes = 4 * KB;
-#else // LLVM_x86 requires more memory to throw stack overflow exception.
- static const size_t kStackOverflowReservedBytes = 8 * KB;
-#endif
+ static const size_t kStackOverflowReservedBytes = 10 * KB;
// Creates a new native thread corresponding to the given managed peer.
// Used to implement Thread.start.
@@ -448,10 +444,14 @@ class PACKED Thread {
}
// Size of stack less any space reserved for stack overflow
- size_t GetStackSize() {
+ size_t GetStackSize() const {
return stack_size_ - (stack_end_ - stack_begin_);
}
+ byte* GetStackEnd() const {
+ return stack_end_;
+ }
+
// Set the stack end to that to be used during a stack overflow
void SetStackEndForStackOverflow() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
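
With the reserved size now a single 10 KB constant for every backend, the arithmetic behind GetStackSize() and the new GetStackEnd() accessor reduces to "total minus reserved". A small self-contained sketch, assuming (as the subtraction in GetStackSize() implies) that stack_begin_ is the lowest address of the stack and stack_end_ sits kStackOverflowReservedBytes above it; all concrete numbers are illustrative:

    #include <cassert>
    #include <cstddef>

    int main() {
      const size_t kKB = 1024;
      const size_t kStackOverflowReservedBytes = 10 * kKB;  // the unified value
      const size_t stack_size = 1024 * kKB;                 // illustrative total stack

      char* stack_begin = new char[stack_size];                     // lowest address
      char* stack_end = stack_begin + kStackOverflowReservedBytes;  // GetStackEnd()

      // "Size of stack less any space reserved for stack overflow"
      size_t usable = stack_size - static_cast<size_t>(stack_end - stack_begin);
      assert(usable == stack_size - kStackOverflowReservedBytes);

      delete[] stack_begin;
      return 0;
    }
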