author Andreas Gampe <agampe@google.com> 2017-09-29 18:52:15 -0700
committer Andreas Gampe <agampe@google.com> 2017-10-04 19:17:49 -0700
commit  1b35b469e0e8a70d9fd3f6c22d7e828b044c0df8 (patch)
tree    69ed5dfd4357ae136078dda3094790679d000335
parent  3aaa6bf5639e0f77e814424bbcbab4fb9e3e990b (diff)
ART: Add allocation tracking to JNI global refs
Add allocation tracking to global references. When the reference
table's free capacity falls below an adjustable threshold, enable
tracking to help track down possibly-leaky allocations.

Bug: 67044702
Test: m test-art-host
Change-Id: Ic17d6ebbad895b54c5ac63558027e04aef2b14c1
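For context, the failure mode this diagnostic targets is native code that promotes objects to JNI global references and never releases them. A minimal, hypothetical sketch of such a leak (not part of this patch; the class and method names are invented):

#include <jni.h>
#include <vector>

static std::vector<jobject> g_cached;  // grows without bound

// Hypothetical native method: every call pins another object in the global
// reference table via NewGlobalRef, with no matching DeleteGlobalRef.
extern "C" JNIEXPORT void JNICALL
Java_com_example_Cache_keep(JNIEnv* env, jclass /*clazz*/, jobject value) {
  g_cached.push_back(env->NewGlobalRef(value));
}

Once the table's free capacity drops to the configured limit, the runtime enables heap allocation tracking; the intent, per the warning message added below, is that the eventual overflow abort comes with better allocation diagnostics for leaks like this one.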
-rw-r--r--  runtime/indirect_reference_table.cc |  2
-rw-r--r--  runtime/indirect_reference_table.h  |  2
-rw-r--r--  runtime/java_vm_ext.cc              | 60
-rw-r--r--  runtime/java_vm_ext.h               |  6
-rw-r--r--  runtime/java_vm_ext_test.cc         | 33
-rw-r--r--  runtime/parsed_options.cc           |  3
-rw-r--r--  runtime/runtime_options.def         |  2
7 files changed, 99 insertions(+), 9 deletions(-)
diff --git a/runtime/indirect_reference_table.cc b/runtime/indirect_reference_table.cc
index 2dd4db3895..2c8ec47492 100644
--- a/runtime/indirect_reference_table.cc
+++ b/runtime/indirect_reference_table.cc
@@ -511,7 +511,7 @@ bool IndirectReferenceTable::EnsureFreeCapacity(size_t free_capacity, std::strin
return true;
}
-size_t IndirectReferenceTable::FreeCapacity() {
+size_t IndirectReferenceTable::FreeCapacity() const {
return max_entries_ - segment_state_.top_index;
}
diff --git a/runtime/indirect_reference_table.h b/runtime/indirect_reference_table.h
index 7daf01ce61..6675099523 100644
--- a/runtime/indirect_reference_table.h
+++ b/runtime/indirect_reference_table.h
@@ -293,7 +293,7 @@ class IndirectReferenceTable {
REQUIRES_SHARED(Locks::mutator_lock_);
// See implementation of EnsureFreeCapacity. We'll only state here how much is trivially free,
// without recovering holes. Thus this is a conservative estimate.
- size_t FreeCapacity() REQUIRES_SHARED(Locks::mutator_lock_);
+ size_t FreeCapacity() const;
// Note IrtIterator does not have a read barrier as it's used to visit roots.
IrtIterator begin() {
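The comment above calls FreeCapacity() a conservative estimate: it only counts the slots above the table's top index and ignores holes left by removed entries below it. A simplified stand-in (not the real IndirectReferenceTable) illustrating the distinction:

#include <cstddef>

// Illustrative only: max_entries/top_index/hole_count are simplified stand-ins
// for the real table's bookkeeping.
struct SimpleRefTable {
  size_t max_entries;
  size_t top_index;   // high-water mark of used slots
  size_t hole_count;  // freed slots below top_index, not yet reclaimed

  // The conservative estimate used above: ignores holes entirely.
  size_t FreeCapacity() const { return max_entries - top_index; }

  // What a more expensive count that recovers holes would report.
  size_t FreeCapacityWithHoles() const {
    return max_entries - top_index + hole_count;
  }
};

Being conservative is fine for this feature: underestimating the free capacity can only turn allocation tracking on a little early, never too late.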
diff --git a/runtime/java_vm_ext.cc b/runtime/java_vm_ext.cc
index 5a1605323e..a72fa54e08 100644
--- a/runtime/java_vm_ext.cc
+++ b/runtime/java_vm_ext.cc
@@ -28,6 +28,8 @@
#include "check_jni.h"
#include "dex_file-inl.h"
#include "fault_handler.h"
+#include "gc/allocation_record.h"
+#include "gc/heap.h"
#include "gc_root-inl.h"
#include "indirect_reference_table-inl.h"
#include "jni_internal.h"
@@ -468,7 +470,11 @@ JavaVMExt::JavaVMExt(Runtime* runtime,
weak_globals_add_condition_("weak globals add condition",
(CHECK(Locks::jni_weak_globals_lock_ != nullptr),
*Locks::jni_weak_globals_lock_)),
- env_hooks_() {
+ env_hooks_(),
+ enable_allocation_tracking_delta_(
+ runtime_options.GetOrDefault(RuntimeArgumentMap::GlobalRefAllocStackTraceLimit)),
+ allocation_tracking_enabled_(false),
+ old_allocation_tracking_state_(false) {
functions = unchecked_functions_;
SetCheckJniEnabled(runtime_options.Exists(RuntimeArgumentMap::CheckJni));
}
@@ -583,18 +589,55 @@ bool JavaVMExt::ShouldTrace(ArtMethod* method) {
return true;
}
+void JavaVMExt::CheckGlobalRefAllocationTracking() {
+ if (LIKELY(enable_allocation_tracking_delta_ == 0)) {
+ return;
+ }
+ size_t simple_free_capacity = globals_.FreeCapacity();
+ if (UNLIKELY(simple_free_capacity <= enable_allocation_tracking_delta_)) {
+ if (!allocation_tracking_enabled_) {
+ LOG(WARNING) << "Global reference storage appears close to exhaustion, program termination "
+ << "may be imminent. Enabling allocation tracking to improve abort diagnostics. "
+ << "This will result in program slow-down.";
+
+ old_allocation_tracking_state_ = runtime_->GetHeap()->IsAllocTrackingEnabled();
+ if (!old_allocation_tracking_state_) {
+ // Need to be guaranteed suspended.
+ ScopedObjectAccess soa(Thread::Current());
+ ScopedThreadSuspension sts(soa.Self(), ThreadState::kNative);
+ gc::AllocRecordObjectMap::SetAllocTrackingEnabled(true);
+ }
+ allocation_tracking_enabled_ = true;
+ }
+ } else {
+ if (UNLIKELY(allocation_tracking_enabled_)) {
+ if (!old_allocation_tracking_state_) {
+ // Need to be guaranteed suspended.
+ ScopedObjectAccess soa(Thread::Current());
+ ScopedThreadSuspension sts(soa.Self(), ThreadState::kNative);
+ gc::AllocRecordObjectMap::SetAllocTrackingEnabled(false);
+ }
+ allocation_tracking_enabled_ = false;
+ }
+ }
+}
+
jobject JavaVMExt::AddGlobalRef(Thread* self, ObjPtr<mirror::Object> obj) {
// Check for null after decoding the object to handle cleared weak globals.
if (obj == nullptr) {
return nullptr;
}
- WriterMutexLock mu(self, *Locks::jni_globals_lock_);
+ IndirectRef ref;
std::string error_msg;
- IndirectRef ref = globals_.Add(kIRTFirstSegment, obj, &error_msg);
+ {
+ WriterMutexLock mu(self, *Locks::jni_globals_lock_);
+ ref = globals_.Add(kIRTFirstSegment, obj, &error_msg);
+ }
if (UNLIKELY(ref == nullptr)) {
LOG(FATAL) << error_msg;
UNREACHABLE();
}
+ CheckGlobalRefAllocationTracking();
return reinterpret_cast<jobject>(ref);
}
@@ -625,11 +668,14 @@ void JavaVMExt::DeleteGlobalRef(Thread* self, jobject obj) {
if (obj == nullptr) {
return;
}
- WriterMutexLock mu(self, *Locks::jni_globals_lock_);
- if (!globals_.Remove(kIRTFirstSegment, obj)) {
- LOG(WARNING) << "JNI WARNING: DeleteGlobalRef(" << obj << ") "
- << "failed to find entry";
+ {
+ WriterMutexLock mu(self, *Locks::jni_globals_lock_);
+ if (!globals_.Remove(kIRTFirstSegment, obj)) {
+ LOG(WARNING) << "JNI WARNING: DeleteGlobalRef(" << obj << ") "
+ << "failed to find entry";
+ }
}
+ CheckGlobalRefAllocationTracking();
}
void JavaVMExt::DeleteWeakGlobalRef(Thread* self, jweak obj) {
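Stepping back from the hunks above: CheckGlobalRefAllocationTracking() is a threshold toggle with memory. It turns allocation tracking on when the global table's free capacity drops to the configured delta, and turns it back off when usage recedes, but only if it was this code that enabled it in the first place. A simplified, self-contained model of that behavior (AllocTracker and the names below are illustrative, not ART types):

#include <cstddef>

// Stand-in for the heap's allocation-record machinery.
struct AllocTracker {
  bool enabled = false;
  void Set(bool on) { enabled = on; }
  bool IsEnabled() const { return enabled; }
};

class GlobalRefWatcher {
 public:
  GlobalRefWatcher(size_t delta, AllocTracker* tracker)
      : delta_(delta), tracker_(tracker) {}

  // Mirrors the enable/disable decision made after each add/remove.
  void Check(size_t free_capacity) {
    if (delta_ == 0) {
      return;  // Feature off: the option's default value.
    }
    if (free_capacity <= delta_) {
      if (!low_capacity_mode_) {
        old_state_ = tracker_->IsEnabled();
        if (!old_state_) {
          tracker_->Set(true);   // Enable tracking as exhaustion approaches.
        }
        low_capacity_mode_ = true;
      }
    } else if (low_capacity_mode_) {
      if (!old_state_) {
        tracker_->Set(false);    // Only undo what this code turned on.
      }
      low_capacity_mode_ = false;
    }
  }

 private:
  const size_t delta_;
  AllocTracker* const tracker_;
  bool low_capacity_mode_ = false;  // plays the role of allocation_tracking_enabled_
  bool old_state_ = false;          // plays the role of old_allocation_tracking_state_
};

Remembering the prior state is what keeps this feature from switching off allocation tracking that something else, such as a debugging tool, had already requested.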
diff --git a/runtime/java_vm_ext.h b/runtime/java_vm_ext.h
index b767b199f0..0510d6ab75 100644
--- a/runtime/java_vm_ext.h
+++ b/runtime/java_vm_ext.h
@@ -211,6 +211,8 @@ class JavaVMExt : public JavaVM {
REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(Locks::jni_weak_globals_lock_);
+ void CheckGlobalRefAllocationTracking();
+
Runtime* const runtime_;
// Used for testing. By default, we'll LOG(FATAL) the reason.
@@ -247,6 +249,10 @@ class JavaVMExt : public JavaVM {
// TODO Maybe move this to Runtime.
std::vector<GetEnvHook> env_hooks_;
+ size_t enable_allocation_tracking_delta_;
+ std::atomic<bool> allocation_tracking_enabled_;
+ std::atomic<bool> old_allocation_tracking_state_;
+
DISALLOW_COPY_AND_ASSIGN(JavaVMExt);
};
diff --git a/runtime/java_vm_ext_test.cc b/runtime/java_vm_ext_test.cc
index 2cbfa81b91..5db493b115 100644
--- a/runtime/java_vm_ext_test.cc
+++ b/runtime/java_vm_ext_test.cc
@@ -19,6 +19,7 @@
#include <pthread.h>
#include "common_runtime_test.h"
+#include "gc/heap.h"
#include "java_vm_ext.h"
#include "runtime.h"
@@ -134,4 +135,36 @@ TEST_F(JavaVmExtTest, DetachCurrentThread) {
EXPECT_EQ(JNI_ERR, err);
}
+class JavaVmExtStackTraceTest : public JavaVmExtTest {
+ protected:
+ void SetUpRuntimeOptions(RuntimeOptions* options) OVERRIDE {
+ options->emplace_back("-XX:GlobalRefAllocStackTraceLimit=50000", nullptr);
+ }
+};
+
+TEST_F(JavaVmExtStackTraceTest, TestEnableDisable) {
+ ASSERT_FALSE(Runtime::Current()->GetHeap()->IsAllocTrackingEnabled());
+
+ JNIEnv* env;
+ jint ok = vm_->AttachCurrentThread(&env, nullptr);
+ ASSERT_EQ(JNI_OK, ok);
+
+ std::vector<jobject> global_refs_;
+ jobject local_ref = env->NewStringUTF("Dummy");
+ for (size_t i = 0; i < 2000; ++i) {
+ global_refs_.push_back(env->NewGlobalRef(local_ref));
+ }
+
+ EXPECT_TRUE(Runtime::Current()->GetHeap()->IsAllocTrackingEnabled());
+
+ for (jobject global_ref : global_refs_) {
+ env->DeleteGlobalRef(global_ref);
+ }
+
+ EXPECT_FALSE(Runtime::Current()->GetHeap()->IsAllocTrackingEnabled());
+
+ ok = vm_->DetachCurrentThread();
+ EXPECT_EQ(JNI_OK, ok);
+}
+
} // namespace art
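A note on the test's numbers, under the assumption that the global reference table's maximum capacity at the time was 51200 entries (that constant is not part of this diff):

#include <cstddef>

constexpr size_t kAssumedGlobalsMax = 51200;  // assumption, not defined in this patch
constexpr size_t kTraceLimit = 50000;         // from -XX:GlobalRefAllocStackTraceLimit above
constexpr size_t kRefsCreated = 2000;         // global refs the test creates

// Tracking engages once free capacity <= kTraceLimit, i.e. after roughly
// kAssumedGlobalsMax - kTraceLimit = 1200 live refs; 2000 comfortably crosses
// that, and deleting them all should bring usage back down and disable
// tracking again, which is exactly what the test asserts.
static_assert(kAssumedGlobalsMax - kRefsCreated <= kTraceLimit,
              "2000 refs should trip the limit");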
diff --git a/runtime/parsed_options.cc b/runtime/parsed_options.cc
index 1d524fd5e6..9888186ed0 100644
--- a/runtime/parsed_options.cc
+++ b/runtime/parsed_options.cc
@@ -310,6 +310,9 @@ std::unique_ptr<RuntimeParser> ParsedOptions::MakeParser(bool ignore_unrecognize
.Define("-XX:ThreadSuspendTimeout=_") // in ms
.WithType<MillisecondsToNanoseconds>() // store as ns
.IntoKey(M::ThreadSuspendTimeout)
+ .Define("-XX:GlobalRefAllocStackTraceLimit=_") // Number of free slots to enable tracing.
+ .WithType<unsigned int>()
+ .IntoKey(M::GlobalRefAllocStackTraceLimit)
.Define("-XX:SlowDebug=_")
.WithType<bool>()
.WithValueMap({{"false", false}, {"true", true}})
diff --git a/runtime/runtime_options.def b/runtime/runtime_options.def
index 78a60faa3a..cafae22e8c 100644
--- a/runtime/runtime_options.def
+++ b/runtime/runtime_options.def
@@ -145,4 +145,6 @@ RUNTIME_OPTIONS_KEY (void (*)(), HookAbort, nullpt
RUNTIME_OPTIONS_KEY (bool, SlowDebug, false)
+RUNTIME_OPTIONS_KEY (unsigned int, GlobalRefAllocStackTraceLimit, 0) // 0 = off
+
#undef RUNTIME_OPTIONS_KEY
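Finally, the option defaults to 0, which keeps the feature off. One way to switch it on when embedding the runtime through the standard JNI invocation API, sketched with an arbitrary limit (everything here besides the option string itself is illustrative):

#include <jni.h>

int main() {
  JavaVMOption options[1];
  options[0].optionString =
      const_cast<char*>("-XX:GlobalRefAllocStackTraceLimit=8000");

  JavaVMInitArgs vm_args;
  vm_args.version = JNI_VERSION_1_6;
  vm_args.nOptions = 1;
  vm_args.options = options;
  vm_args.ignoreUnrecognized = JNI_FALSE;

  JavaVM* vm = nullptr;
  JNIEnv* env = nullptr;
  if (JNI_CreateJavaVM(&vm, reinterpret_cast<void**>(&env), &vm_args) != JNI_OK) {
    return 1;
  }
  // ... run code that creates global references ...
  vm->DestroyJavaVM();
  return 0;
}

On a device the runtime is normally started for you, so in practice the flag would be passed through whatever mechanism the platform uses to forward -XX: options to the runtime.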