Merge stack usage reduction for overflow checks
Cherry pick from: b038ba66a166fb264ca121632f447712e0973b5b
Change-Id: I19999e72ff731d4fc73d91b9ff767de5858c15ee
diff --git a/runtime/entrypoints/entrypoint_utils.cc b/runtime/entrypoints/entrypoint_utils.cc
index a0e35f8..cf89850 100644
--- a/runtime/entrypoints/entrypoint_utils.cc
+++ b/runtime/entrypoints/entrypoint_utils.cc
@@ -211,7 +211,7 @@
}
bool explicit_overflow_check = Runtime::Current()->ExplicitStackOverflowChecks();
- self->ResetDefaultStackEnd(!explicit_overflow_check); // Return to default stack size.
+ self->ResetDefaultStackEnd(); // Return to default stack size.
// And restore protection if implicit checks are on.
if (!explicit_overflow_check) {
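Note: this call-site change pairs with the thread.h change below. ResetDefaultStackEnd() no longer needs to know which check mode is active, because the protected region is excluded from stack_begin at attach time. A minimal standalone sketch of the recovery path, with all constants and fields assumed rather than taken from ART:

  #include <sys/mman.h>
  #include <cstddef>
  #include <cstdint>

  constexpr size_t kStackOverflowProtectedSize = 4 * 4096;   // assumed value
  constexpr size_t kStackOverflowReservedBytes = 32 * 1024;  // assumed stand-in for
                                                             // GetStackOverflowReservedBytes(kRuntimeISA)

  struct ThreadModel {
    uint8_t* stack_begin;
    uint8_t* stack_end;

    void ResetDefaultStackEnd() {
      // One formula for both check modes; see the thread.h hunk below.
      stack_end = stack_begin + kStackOverflowReservedBytes;
    }
  };

  void FinishStackOverflow(ThreadModel* self, bool explicit_overflow_check) {
    self->ResetDefaultStackEnd();  // return to the default stack size
    if (!explicit_overflow_check) {
      // Re-arm the implicit check: make the region below stack_begin
      // inaccessible again so the next overflow faults.
      mprotect(self->stack_begin - kStackOverflowProtectedSize,
               kStackOverflowProtectedSize, PROT_NONE);
    }
  }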
diff --git a/runtime/thread.cc b/runtime/thread.cc
index dcc897f..d5163a8 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -224,7 +224,8 @@
} else {
// If we are going to use implicit stack checks, allocate space for the protected
// region at the bottom of the stack.
- stack_size += Thread::kStackOverflowImplicitCheckSize;
+ stack_size += Thread::kStackOverflowImplicitCheckSize +
+ GetStackOverflowReservedBytes(kRuntimeISA);
}
// Some systems require the stack size to be a multiple of the system page size, so round up.
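Note: with implicit checks, the requested stack size now grows by both the protected region and the overflow-reserved bytes before page rounding, so neither eats into the space the caller asked for. A sketch of the arithmetic, with illustrative constants (ART's actual values are per-ISA):

  #include <cstddef>

  constexpr size_t kPageSize = 4096;
  constexpr size_t kStackOverflowImplicitCheckSize = 8 * kPageSize;  // assumed
  constexpr size_t kStackOverflowReservedBytes = 32 * 1024;          // assumed

  size_t AdjustedStackSize(size_t requested, bool implicit_checks) {
    size_t stack_size = requested;
    if (implicit_checks) {
      // Reserve the protected region plus the overflow head room.
      stack_size += kStackOverflowImplicitCheckSize + kStackOverflowReservedBytes;
    }
    // Round up to a whole number of pages.
    return (stack_size + kPageSize - 1) & ~(kPageSize - 1);
  }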
@@ -264,7 +265,7 @@
// a segv.
// Read every page from the high address to the low.
- for (byte* p = stack_top; p > pregion; p -= kPageSize) {
+ for (byte* p = stack_top; p >= pregion; p -= kPageSize) {
dont_optimize_this = *p;
}
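Note: the comparison change is an off-by-one fix. With p > pregion the walk stops one page above pregion, so the lowest page is never read (and hence may not be resident) before it is protected; >= makes the walk inclusive. A self-contained version of the loop (byte is ART's typedef; uint8_t stands in here):

  #include <cstddef>
  #include <cstdint>

  constexpr size_t kPageSize = 4096;
  volatile uint8_t dont_optimize_this;

  void TouchPages(uint8_t* stack_top, uint8_t* pregion) {
    // Read one byte per page from the high address down to and
    // including the page at pregion, faulting each one in.
    for (uint8_t* p = stack_top; p >= pregion; p -= kPageSize) {
      dont_optimize_this = *p;
    }
  }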
@@ -539,7 +540,7 @@
// Set stack_end_ to the bottom of the stack, reserving enough space to handle stack overflows

bool implicit_stack_check = !Runtime::Current()->ExplicitStackOverflowChecks();
- ResetDefaultStackEnd(implicit_stack_check);
+ ResetDefaultStackEnd();
// Install the protected region if we are doing implicit overflow checks.
if (implicit_stack_check) {
@@ -551,8 +552,8 @@
// The thread might have a protected region at the bottom. We need
// to install our own region, so we move the limits of the stack
// to make room for it.
- tlsPtr_.stack_begin += guardsize;
- tlsPtr_.stack_end += guardsize;
+ tlsPtr_.stack_begin += guardsize + kStackOverflowProtectedSize;
+ tlsPtr_.stack_end += guardsize + kStackOverflowProtectedSize;
tlsPtr_.stack_size -= guardsize;
InstallImplicitProtection();
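Note: when attaching to an existing pthread whose stack already carries a system guard, both limits now skip past guardsize plus ART's own protected region, so InstallImplicitProtection() operates on addresses inside the thread's usable range. A sketch of the fixup mirroring the hunk above (note that stack_size still shrinks only by guardsize, as in the patch); the struct and constant are stand-ins:

  #include <cstddef>
  #include <cstdint>

  constexpr size_t kStackOverflowProtectedSize = 4 * 4096;  // assumed

  struct StackLimits {
    uint8_t* stack_begin;
    uint8_t* stack_end;
    size_t stack_size;
  };

  void MakeRoomForProtectedRegion(StackLimits* t, size_t guardsize) {
    // Move both limits above the system guard and the region that
    // InstallImplicitProtection() will mprotect.
    t->stack_begin += guardsize + kStackOverflowProtectedSize;
    t->stack_end += guardsize + kStackOverflowProtectedSize;
    t->stack_size -= guardsize;  // the patch subtracts only the guard here
  }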
diff --git a/runtime/thread.h b/runtime/thread.h
index 120ff6f..fe950c4 100644
--- a/runtime/thread.h
+++ b/runtime/thread.h
@@ -594,16 +594,10 @@
void SetStackEndForStackOverflow() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Set the stack end to the value to be used during regular execution
- void ResetDefaultStackEnd(bool implicit_overflow_check) {
+ void ResetDefaultStackEnd() {
// Our stacks grow down, so we want stack_end_ to be near there, while reserving enough room
// to throw a StackOverflowError.
- if (implicit_overflow_check) {
- // For implicit checks we also need to add in the protected region above the
- // overflow region.
- tlsPtr_.stack_end = tlsPtr_.stack_begin + kStackOverflowImplicitCheckSize;
- } else {
- tlsPtr_.stack_end = tlsPtr_.stack_begin + GetStackOverflowReservedBytes(kRuntimeISA);
- }
+ tlsPtr_.stack_end = tlsPtr_.stack_begin + GetStackOverflowReservedBytes(kRuntimeISA);
}
// Install the protected region for implicit stack checks.
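Note: the simplified setter pairs with SetStackEndForStackOverflow() declared above. A model of the two watermarks, with illustrative bodies (the real SetStackEndForStackOverflow is not shown in this hunk, and the per-ISA reserved size is assumed):

  #include <cstddef>
  #include <cstdint>

  enum InstructionSet { kRuntimeISA };

  constexpr size_t GetStackOverflowReservedBytes(InstructionSet) {
    return 32 * 1024;  // assumed per-ISA value
  }

  struct ThreadWatermarks {
    uint8_t* stack_begin = nullptr;
    uint8_t* stack_end = nullptr;

    // While throwing StackOverflowError, hand the reserved room to the
    // handler (illustrative body).
    void SetStackEndForStackOverflow() { stack_end = stack_begin; }

    // Regular execution: keep the reserved bytes in hand so a
    // StackOverflowError can still be constructed and thrown.
    void ResetDefaultStackEnd() {
      stack_end = stack_begin + GetStackOverflowReservedBytes(kRuntimeISA);
    }
  };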