Fix and re-enable lock dumping in stack dumps.
This patch adds a flag to tell the method verifier not to load
classes when resolving types, so that when we ask the method verifier
to find monitor-enter instructions for stack dumping it doesn't try
to allocate (since the most common cause of stack dumping is SIGQUIT).
We believe that all the classes we care about will be loaded already
anyway, since we're only interested in _held_ locks, and you can only
hold a lock if you've executed the code that leads to the monitor-enter,
and you can't execute the code without loading the relevant classes.
Any not-yet-loaded classes shouldn't be relevant for our purposes.
Also clarify the stack dumps when a thread is starting up; although
strictly speaking a thread might be in the kNative state, it's more
helpful if we also explicitly say that it's still starting up.
Also a few GC log output fixes.
Change-Id: Ibf8519e9bde27838c511eafa5c13734c5bebeab6
diff --git a/src/heap.cc b/src/heap.cc
index 7edbcf0..1cdc9c9 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -396,7 +396,8 @@
LOG(INFO) << "Total time spent in GC: " << PrettyDuration(total_duration);
LOG(INFO) << "Mean GC size throughput: "
<< PrettySize(GetTotalBytesFreed() / total_seconds) << "/s";
- LOG(INFO) << "Mean GC object throughput: " << GetTotalObjectsFreed() / total_seconds << "/s";
+ LOG(INFO) << "Mean GC object throughput: "
+ << (GetTotalObjectsFreed() / total_seconds) << " objects/s";
}
LOG(INFO) << "Total number of allocations: " << total_objects_allocated;
LOG(INFO) << "Total bytes allocated " << PrettySize(total_bytes_allocated);
@@ -406,7 +407,7 @@
<< PrettyDuration(allocation_time / total_objects_allocated);
}
LOG(INFO) << "Total mutator paused time: " << PrettyDuration(total_paused_time);
- LOG(INFO) << "Total time waiting for GC to complete time: " << PrettyDuration(total_wait_time_);
+ LOG(INFO) << "Total time waiting for GC to complete: " << PrettyDuration(total_wait_time_);
}
Heap::~Heap() {
diff --git a/src/monitor.cc b/src/monitor.cc
index 6172136..33383eb 100644
--- a/src/monitor.cc
+++ b/src/monitor.cc
@@ -901,41 +901,35 @@
return; // No "tries" implies no synchronization, so no held locks to report.
}
- // TODO: Enable dex register lock descriptions, disabling as for the portable path GetVReg is
- // unimplemented. There is also a possible deadlock relating to the verifier calling
- // ClassLoader.loadClass and reentering managed code whilst the ThreadList lock is held.
- const bool kEnableDexRegisterLockDescriptions = false;
- if (kEnableDexRegisterLockDescriptions) {
- // Ask the verifier for the dex pcs of all the monitor-enter instructions corresponding to
- // the locks held in this stack frame.
- std::vector<uint32_t> monitor_enter_dex_pcs;
- verifier::MethodVerifier::FindLocksAtDexPc(m, stack_visitor->GetDexPc(), monitor_enter_dex_pcs);
- if (monitor_enter_dex_pcs.empty()) {
- return;
+ // Ask the verifier for the dex pcs of all the monitor-enter instructions corresponding to
+ // the locks held in this stack frame.
+ std::vector<uint32_t> monitor_enter_dex_pcs;
+ verifier::MethodVerifier::FindLocksAtDexPc(m, stack_visitor->GetDexPc(), monitor_enter_dex_pcs);
+ if (monitor_enter_dex_pcs.empty()) {
+ return;
+ }
+
+ // Verification is an iterative process, so it can visit the same monitor-enter instruction
+ // repeatedly with increasingly accurate type information. We don't want duplicates.
+ // TODO: is this fixed if we share the other std::vector-returning verifier code?
+ STLSortAndRemoveDuplicates(&monitor_enter_dex_pcs);
+
+ for (size_t i = 0; i < monitor_enter_dex_pcs.size(); ++i) {
+ // The verifier works in terms of the dex pcs of the monitor-enter instructions.
+ // We want the registers used by those instructions (so we can read the values out of them).
+ uint32_t dex_pc = monitor_enter_dex_pcs[i];
+ uint16_t monitor_enter_instruction = code_item->insns_[dex_pc];
+
+ // Quick sanity check.
+ if ((monitor_enter_instruction & 0xff) != Instruction::MONITOR_ENTER) {
+ LOG(FATAL) << "expected monitor-enter @" << dex_pc << "; was "
+ << reinterpret_cast<void*>(monitor_enter_instruction);
}
- // Verification is an iterative process, so it can visit the same monitor-enter instruction
- // repeatedly with increasingly accurate type information. Our callers don't want to see
- // duplicates.
- STLSortAndRemoveDuplicates(&monitor_enter_dex_pcs);
-
- for (size_t i = 0; i < monitor_enter_dex_pcs.size(); ++i) {
- // The verifier works in terms of the dex pcs of the monitor-enter instructions.
- // We want the registers used by those instructions (so we can read the values out of them).
- uint32_t dex_pc = monitor_enter_dex_pcs[i];
- uint16_t monitor_enter_instruction = code_item->insns_[dex_pc];
-
- // Quick sanity check.
- if ((monitor_enter_instruction & 0xff) != Instruction::MONITOR_ENTER) {
- LOG(FATAL) << "expected monitor-enter @" << dex_pc << "; was "
- << reinterpret_cast<void*>(monitor_enter_instruction);
- }
-
- uint16_t monitor_register = ((monitor_enter_instruction >> 8) & 0xff);
- Object* o = reinterpret_cast<Object*>(stack_visitor->GetVReg(m, monitor_register,
- kReferenceVReg));
- DumpLockedObject(os, o);
- }
+ uint16_t monitor_register = ((monitor_enter_instruction >> 8) & 0xff);
+ Object* o = reinterpret_cast<Object*>(stack_visitor->GetVReg(m, monitor_register,
+ kReferenceVReg));
+ DumpLockedObject(os, o);
}
}
diff --git a/src/signal_catcher.cc b/src/signal_catcher.cc
index 9fdf4e9..c021dd1 100644
--- a/src/signal_catcher.cc
+++ b/src/signal_catcher.cc
@@ -124,7 +124,7 @@
thread_list->SuspendAll();
Thread* self = Thread::Current();
Locks::mutator_lock_->AssertExclusiveHeld(self);
- const char* old_cause = self->StartAssertNoThreadSuspension("Handling sigquit");
+ const char* old_cause = self->StartAssertNoThreadSuspension("Handling SIGQUIT");
ThreadState old_state = self->SetStateUnsafe(kRunnable);
std::ostringstream os;
diff --git a/src/thread.cc b/src/thread.cc
index 484806e..439e8f6 100644
--- a/src/thread.cc
+++ b/src/thread.cc
@@ -773,7 +773,11 @@
}
os << " prio=" << priority
<< " tid=" << thread->GetThinLockId()
- << " " << thread->GetState() << "\n";
+ << " " << thread->GetState();
+ if (thread->IsStillStarting()) {
+ os << " (still starting up)";
+ }
+ os << "\n";
} else {
os << '"' << ::art::GetThreadName(tid) << '"'
<< " prio=" << priority
@@ -996,12 +1000,12 @@
bool Thread::IsStillStarting() const {
// You might think you can check whether the state is kStarting, but for much of thread startup,
- // the thread might also be in kVmWait.
+ // the thread is in kNative; it might also be in kVmWait.
// You might think you can check whether the peer is NULL, but the peer is actually created and
// assigned fairly early on, and needs to be.
// It turns out that the last thing to change is the thread name; that's a good proxy for "has
// this thread _ever_ entered kRunnable".
- return (*name_ == kThreadNameDuringStartup);
+ return (jpeer_ == NULL && opeer_ == NULL) || (*name_ == kThreadNameDuringStartup);
}
void Thread::AssertNoPendingException() const {
diff --git a/src/thread.h b/src/thread.h
index e8f69db..ea49960 100644
--- a/src/thread.h
+++ b/src/thread.h
@@ -289,8 +289,7 @@
}
bool HasPeer() const {
- CHECK(jpeer_ == NULL);
- return opeer_ != NULL;
+ return jpeer_ != NULL || opeer_ != NULL;
}
RuntimeStats* GetStats() {
diff --git a/src/timing_logger.h b/src/timing_logger.h
index b0a3e66..3b3dcfc 100644
--- a/src/timing_logger.h
+++ b/src/timing_logger.h
@@ -175,9 +175,11 @@
mean -= mean % (divisor / 1000);
std_dev -= std_dev % (divisor / 1000);
}
- os << name_ << ": " << std::setw(8)
- << FormatDuration(mean * kAdjust, tu) << " std_dev "
- << FormatDuration(std_dev * kAdjust, tu) << " " << labels_[i] << "\n";
+ os << StringPrintf("%s: %10s (std_dev %8s) %s\n",
+ name_.c_str(),
+ FormatDuration(mean * kAdjust, tu).c_str(),
+ FormatDuration(std_dev * kAdjust, tu).c_str(),
+ labels_[i].c_str());
}
uint64_t total_mean_x2 = total_time_squared_;
uint64_t mean_total_ns = GetTotalTime();
diff --git a/src/verifier/method_verifier.cc b/src/verifier/method_verifier.cc
index 41098cb..6611d3c 100644
--- a/src/verifier/method_verifier.cc
+++ b/src/verifier/method_verifier.cc
@@ -295,7 +295,7 @@
uint64_t start_ns = NanoTime();
MethodVerifier verifier(dex_file, dex_cache, class_loader, class_def_idx, code_item, method_idx,
- method, method_access_flags);
+ method, method_access_flags, true);
if (verifier.Verify()) {
// Verification completed, however failures may be pending that didn't cause the verification
// to hard fail.
@@ -331,7 +331,7 @@
const DexFile::CodeItem* code_item, AbstractMethod* method,
uint32_t method_access_flags) {
MethodVerifier verifier(dex_file, dex_cache, class_loader, class_def_idx, code_item,
- dex_method_idx, method, method_access_flags);
+ dex_method_idx, method, method_access_flags, true);
verifier.Verify();
verifier.DumpFailures(os);
os << verifier.info_messages_.str();
@@ -346,15 +346,17 @@
AbstractMethod* method,
uint32_t method_access_flags, uint32_t dex_pc) {
MethodVerifier verifier(dex_file, dex_cache, class_loader, class_def_idx, code_item,
- dex_method_idx, method, method_access_flags);
+ dex_method_idx, method, method_access_flags, true);
verifier.Verify();
return verifier.DescribeVRegs(dex_pc);
}
MethodVerifier::MethodVerifier(const DexFile* dex_file, DexCache* dex_cache,
ClassLoader* class_loader, uint32_t class_def_idx, const DexFile::CodeItem* code_item,
- uint32_t dex_method_idx, AbstractMethod* method, uint32_t method_access_flags)
- : work_insn_idx_(-1),
+ uint32_t dex_method_idx, AbstractMethod* method, uint32_t method_access_flags,
+ bool can_load_classes)
+ : reg_types_(can_load_classes),
+ work_insn_idx_(-1),
dex_method_idx_(dex_method_idx),
foo_method_(method),
method_access_flags_(method_access_flags),
@@ -368,7 +370,8 @@
have_pending_hard_failure_(false),
have_pending_runtime_throw_failure_(false),
new_instance_count_(0),
- monitor_enter_count_(0) {
+ monitor_enter_count_(0),
+ can_load_classes_(can_load_classes) {
}
void MethodVerifier::FindLocksAtDexPc(AbstractMethod* m, uint32_t dex_pc,
@@ -376,7 +379,7 @@
MethodHelper mh(m);
MethodVerifier verifier(&mh.GetDexFile(), mh.GetDexCache(), mh.GetClassLoader(),
mh.GetClassDefIndex(), mh.GetCodeItem(), m->GetDexMethodIndex(),
- m, m->GetAccessFlags());
+ m, m->GetAccessFlags(), false);
verifier.interesting_dex_pc_ = dex_pc;
verifier.monitor_enter_dex_pcs_ = &monitor_enter_dex_pcs;
verifier.FindLocksAtDexPc();
diff --git a/src/verifier/method_verifier.h b/src/verifier/method_verifier.h
index b676269..a02cc25 100644
--- a/src/verifier/method_verifier.h
+++ b/src/verifier/method_verifier.h
@@ -227,7 +227,7 @@
private:
explicit MethodVerifier(const DexFile* dex_file, DexCache* dex_cache,
ClassLoader* class_loader, uint32_t class_def_idx, const DexFile::CodeItem* code_item,
- uint32_t method_idx, AbstractMethod* method, uint32_t access_flags)
+ uint32_t method_idx, AbstractMethod* method, uint32_t access_flags, bool can_load_classes)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Adds the given string to the beginning of the last failure message.
@@ -670,6 +670,8 @@
// The number of occurrences of specific opcodes.
size_t new_instance_count_;
size_t monitor_enter_count_;
+
+ const bool can_load_classes_;
};
std::ostream& operator<<(std::ostream& os, const MethodVerifier::FailureKind& rhs);
diff --git a/src/verifier/reg_type.cc b/src/verifier/reg_type.cc
index dc41ceb..ab1da1e 100644
--- a/src/verifier/reg_type.cc
+++ b/src/verifier/reg_type.cc
@@ -173,7 +173,7 @@
}
} else {
if (!IsUnresolvedMergedReference() && !IsUnresolvedSuperClass() &&
- GetDescriptor()->CharAt(0) == '[') {
+ GetDescriptor()[0] == '[') {
// Super class of all arrays is Object.
return cache->JavaLangObject(true);
} else {
diff --git a/src/verifier/reg_type.h b/src/verifier/reg_type.h
index c610e06..65ee88a 100644
--- a/src/verifier/reg_type.h
+++ b/src/verifier/reg_type.h
@@ -263,9 +263,8 @@
Class* GetClass() const {
DCHECK(!IsUnresolvedReference());
- DCHECK(klass_or_descriptor_ != NULL);
- DCHECK(klass_or_descriptor_->IsClass());
- return down_cast<Class*>(klass_or_descriptor_);
+ DCHECK(klass_ != NULL);
+ return klass_;
}
bool IsJavaLangObject() const {
@@ -274,7 +273,7 @@
bool IsArrayTypes() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
if (IsUnresolvedTypes() && !IsUnresolvedMergedReference() && !IsUnresolvedSuperClass()) {
- return GetDescriptor()->CharAt(0) == '[';
+ return descriptor_[0] == '[';
} else if (HasClass()) {
return GetClass()->IsArrayClass();
} else {
@@ -285,8 +284,8 @@
bool IsObjectArrayTypes() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
if (IsUnresolvedTypes() && !IsUnresolvedMergedReference() && !IsUnresolvedSuperClass()) {
// Primitive arrays will always resolve
- DCHECK(GetDescriptor()->CharAt(1) == 'L' || GetDescriptor()->CharAt(1) == '[');
- return GetDescriptor()->CharAt(0) == '[';
+ DCHECK(descriptor_[1] == 'L' || descriptor_[1] == '[');
+ return descriptor_[0] == '[';
} else if (HasClass()) {
Class* type = GetClass();
return type->IsArrayClass() && !type->GetComponentType()->IsPrimitive();
@@ -330,11 +329,9 @@
return IsUnresolvedTypes() || (IsNonZeroReferenceTypes() && GetClass()->IsInstantiable());
}
- String* GetDescriptor() const {
+ std::string GetDescriptor() const {
DCHECK(IsUnresolvedTypes() && !IsUnresolvedMergedReference() && !IsUnresolvedSuperClass());
- DCHECK(klass_or_descriptor_ != NULL);
- DCHECK(klass_or_descriptor_->GetClass()->IsStringClass());
- return down_cast<String*>(klass_or_descriptor_);
+ return descriptor_;
}
uint16_t GetId() const {
@@ -402,9 +399,9 @@
private:
friend class RegTypeCache;
- RegType(Type type, Object* klass_or_descriptor,
+ RegType(Type type, Class* klass,
uint32_t allocation_pc_or_constant_or_merged_types, uint16_t cache_id)
- : type_(type), klass_or_descriptor_(klass_or_descriptor),
+ : type_(type), klass_(klass),
allocation_pc_or_constant_or_merged_types_(allocation_pc_or_constant_or_merged_types),
cache_id_(cache_id) {
DCHECK(IsConstant() || IsConstantLo() || IsConstantHi() ||
@@ -412,16 +409,26 @@
allocation_pc_or_constant_or_merged_types == 0);
if (!IsConstant() && !IsLongConstant() && !IsLongConstantHigh() && !IsUndefined() &&
!IsConflict() && !IsUnresolvedMergedReference() && !IsUnresolvedSuperClass()) {
- DCHECK(klass_or_descriptor != NULL);
- DCHECK(IsUnresolvedTypes() || klass_or_descriptor_->IsClass());
- DCHECK(!IsUnresolvedTypes() || klass_or_descriptor_->GetClass()->IsStringClass());
+ DCHECK(klass_ != NULL);
+ DCHECK(klass_->IsClass());
+ DCHECK(!IsUnresolvedTypes());
}
}
+ RegType(Type type, const std::string& descriptor, uint32_t allocation_pc, uint16_t cache_id)
+ : type_(type),
+ klass_(NULL),
+ descriptor_(descriptor),
+ allocation_pc_or_constant_or_merged_types_(allocation_pc),
+ cache_id_(cache_id) {
+ }
+
const Type type_; // The current type of the register
- // If known the type of the register, else a String for the descriptor
- Object* klass_or_descriptor_;
+ // If the type of the register is known, its class...
+ Class* klass_;
+ // ...else the descriptor of the (unresolved) type, held as a std::string.
+ std::string descriptor_;
// Overloaded field that:
// - if IsConstant() holds a 32bit constant value
diff --git a/src/verifier/reg_type_cache.cc b/src/verifier/reg_type_cache.cc
index 1b91321..3bf5ad8 100644
--- a/src/verifier/reg_type_cache.cc
+++ b/src/verifier/reg_type_cache.cc
@@ -74,11 +74,11 @@
DCHECK_GT(entries_.size(), static_cast<size_t>(type));
RegType* entry = entries_[type];
if (entry == NULL) {
- Class* klass = NULL;
+ Class* c = NULL;
if (strlen(descriptor) != 0) {
- klass = Runtime::Current()->GetClassLinker()->FindSystemClass(descriptor);
+ c = Runtime::Current()->GetClassLinker()->FindSystemClass(descriptor);
}
- entry = new RegType(type, klass, 0, type);
+ entry = new RegType(type, c, 0, type);
entries_[type] = entry;
}
return *entry;
@@ -95,30 +95,37 @@
return *cur_entry;
}
} else if (cur_entry->IsUnresolvedReference() &&
- cur_entry->GetDescriptor()->Equals(descriptor)) {
+ cur_entry->GetDescriptor() == descriptor) {
return *cur_entry;
}
}
- Class* klass = Runtime::Current()->GetClassLinker()->FindClass(descriptor, loader);
- if (klass != NULL) {
+ ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
+ Class* c;
+ if (can_load_classes_) {
+ c = class_linker->FindClass(descriptor, loader);
+ } else {
+ c = class_linker->LookupClass(descriptor, loader);
+ }
+ if (c != NULL) {
// Able to resolve so create resolved register type that is precise if we
// know the type is final.
- RegType* entry = new RegType(klass->IsFinal() ? RegType::kRegTypePreciseReference
- : RegType::kRegTypeReference,
- klass, 0, entries_.size());
+ RegType* entry = new RegType(c->IsFinal() ? RegType::kRegTypePreciseReference
+ : RegType::kRegTypeReference,
+ c, 0, entries_.size());
entries_.push_back(entry);
return *entry;
} else {
// TODO: we assume unresolved, but we may be able to do better by validating whether the
// descriptor string is valid
// Unable to resolve so create unresolved register type
- DCHECK(Thread::Current()->IsExceptionPending());
- Thread::Current()->ClearException();
+ if (can_load_classes_) {
+ DCHECK(Thread::Current()->IsExceptionPending());
+ Thread::Current()->ClearException();
+ } else {
+ DCHECK(!Thread::Current()->IsExceptionPending());
+ }
if (IsValidDescriptor(descriptor)) {
- String* string_descriptor =
- Runtime::Current()->GetInternTable()->InternStrong(descriptor);
- RegType* entry = new RegType(RegType::kRegTypeUnresolvedReference, string_descriptor, 0,
- entries_.size());
+ RegType* entry = new RegType(RegType::kRegTypeUnresolvedReference, descriptor, 0, entries_.size());
entries_.push_back(entry);
return *entry;
} else {
@@ -214,7 +221,7 @@
const RegType& RegTypeCache::Uninitialized(const RegType& type, uint32_t allocation_pc) {
RegType* entry;
if (type.IsUnresolvedTypes()) {
- String* descriptor = type.GetDescriptor();
+ std::string descriptor(type.GetDescriptor());
for (size_t i = RegType::kRegTypeLastFixedLocation + 1; i < entries_.size(); i++) {
RegType* cur_entry = entries_[i];
if (cur_entry->IsUnresolvedAndUninitializedReference() &&
@@ -245,7 +252,7 @@
const RegType& RegTypeCache::FromUninitialized(const RegType& uninit_type) {
RegType* entry;
if (uninit_type.IsUnresolvedTypes()) {
- String* descriptor = uninit_type.GetDescriptor();
+ std::string descriptor(uninit_type.GetDescriptor());
for (size_t i = RegType::kRegTypeLastFixedLocation + 1; i < entries_.size(); i++) {
RegType* cur_entry = entries_[i];
if (cur_entry->IsUnresolvedReference() && cur_entry->GetDescriptor() == descriptor) {
@@ -271,7 +278,7 @@
// TODO: implement descriptor version.
RegType* entry;
if (type.IsUnresolvedTypes()) {
- String* descriptor = type.GetDescriptor();
+ std::string descriptor(type.GetDescriptor());
for (size_t i = RegType::kRegTypeLastFixedLocation + 1; i < entries_.size(); i++) {
RegType* cur_entry = entries_[i];
if (cur_entry->IsUnresolvedAndUninitializedThisReference() &&
@@ -354,7 +361,7 @@
const RegType& RegTypeCache::GetComponentType(const RegType& array, ClassLoader* loader) {
CHECK(array.IsArrayTypes());
if (array.IsUnresolvedTypes()) {
- std::string descriptor(array.GetDescriptor()->ToModifiedUtf8());
+ std::string descriptor(array.GetDescriptor());
std::string component(descriptor.substr(1, descriptor.size() - 1));
return FromDescriptor(loader, component.c_str(), false);
} else {
diff --git a/src/verifier/reg_type_cache.h b/src/verifier/reg_type_cache.h
index 1cd3fba..54f42fd 100644
--- a/src/verifier/reg_type_cache.h
+++ b/src/verifier/reg_type_cache.h
@@ -26,7 +26,8 @@
class RegTypeCache {
public:
- explicit RegTypeCache() : entries_(RegType::kRegTypeLastFixedLocation + 1) {
+ explicit RegTypeCache(bool can_load_classes)
+ : entries_(RegType::kRegTypeLastFixedLocation + 1), can_load_classes_(can_load_classes) {
Undefined(); // ensure Undefined is initialized
}
~RegTypeCache() {
@@ -142,6 +143,8 @@
private:
// The allocated entries
std::vector<RegType*> entries_;
+ // Whether or not we're allowed to load classes.
+ const bool can_load_classes_;
DISALLOW_COPY_AND_ASSIGN(RegTypeCache);
};
diff --git a/src/verifier/reg_type_test.cc b/src/verifier/reg_type_test.cc
index 18e9a65..c66477c 100644
--- a/src/verifier/reg_type_test.cc
+++ b/src/verifier/reg_type_test.cc
@@ -27,7 +27,7 @@
TEST_F(RegTypeTest, Primitives) {
ScopedObjectAccess soa(Thread::Current());
- RegTypeCache cache;
+ RegTypeCache cache(true);
const RegType& bool_reg_type = cache.Boolean();
EXPECT_FALSE(bool_reg_type.IsUndefined());