Add more logging and sanity checks for JIT mini-debug-info.
Used when diagnosing b/151137723. Keep it around.
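Adds VLOG(jit) logging whenever JIT code or its mini-debug-info is removed,
a ForEachNativeDebugSymbol/VisitAllMethods pair of iteration helpers, and a
debug-build check that the set of compiled methods exactly matches the
registered native debug info.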
Bug: 151137723
Test: test.py -r --jit
Change-Id: I10cc613c7396607e221fdc1f5972d26c1ac03fa8
diff --git a/runtime/jit/debugger_interface.cc b/runtime/jit/debugger_interface.cc
index 0efa4d2..2f929bb 100644
--- a/runtime/jit/debugger_interface.cc
+++ b/runtime/jit/debugger_interface.cc
@@ -25,6 +25,7 @@
#include "base/time_utils.h"
#include "base/utils.h"
#include "dex/dex_file.h"
+#include "elf/elf_debug_reader.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "jit/jit_memory_region.h"
@@ -606,6 +607,8 @@
// Method removal is very expensive since we need to decompress and read ELF files.
// Collect methods to be removed and do the removal in bulk later.
g_removed_jit_functions.push_back(code_ptr);
+
+ VLOG(jit) << "JIT mini-debug-info removed for " << code_ptr;
}
void RepackNativeDebugInfoForJitLocked() {
@@ -645,4 +648,18 @@
return &g_jit_debug_lock;
}
+void ForEachNativeDebugSymbol(std::function<void(const void*, size_t, const char*)> cb) {
+ MutexLock mu(Thread::Current(), g_jit_debug_lock);
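+ // Pick the ELF type layout (32- or 64-bit) that matches this runtime's pointer size.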
+ using ElfRuntimeTypes = std::conditional<sizeof(void*) == 4, ElfTypes32, ElfTypes64>::type;
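+ // Walk the linked list of in-memory ELF symfiles that is also published to
+ // debuggers through the __jit_debug_descriptor interface.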
+ for (const JITCodeEntry* it = __jit_debug_descriptor.head_; it != nullptr; it = it->next_) {
+ ArrayRef<const uint8_t> buffer(it->symfile_addr_, it->symfile_size_);
+ if (!buffer.empty()) {
+ ElfDebugReader<ElfRuntimeTypes> reader(buffer);
+ reader.VisitFunctionSymbols([&](ElfRuntimeTypes::Sym sym, const char* name) {
+ cb(reinterpret_cast<const void*>(sym.st_value), sym.st_size, name);
+ });
+ }
+ }
+}
+
} // namespace art
diff --git a/runtime/jit/debugger_interface.h b/runtime/jit/debugger_interface.h
index 8433e69..d6a8063 100644
--- a/runtime/jit/debugger_interface.h
+++ b/runtime/jit/debugger_interface.h
@@ -72,6 +72,9 @@
// TODO: Unwinding should be race-free. Remove this.
Mutex* GetNativeDebugInfoLock();
+// Calls the given callback for every stored symbol. The callback parameters are (address, size, name).
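+//
+// Example (hypothetical usage):
+//   ForEachNativeDebugSymbol([](const void* addr, size_t size, const char* name) {
+//     LOG(INFO) << name << ": " << addr << " (" << size << " bytes)";
+//   });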
+void ForEachNativeDebugSymbol(std::function<void(const void*, size_t, const char*)> cb);
+
} // namespace art
#endif // ART_RUNTIME_JIT_DEBUGGER_INTERFACE_H_
diff --git a/runtime/jit/jit_code_cache.cc b/runtime/jit/jit_code_cache.cc
index 75ce1c0..29951a7 100644
--- a/runtime/jit/jit_code_cache.cc
+++ b/runtime/jit/jit_code_cache.cc
@@ -173,17 +173,21 @@
return methods_;
}
- void RemoveMethodsIn(const LinearAlloc& alloc) {
- auto kept_end = std::remove_if(
+ void RemoveMethodsIn(const LinearAlloc& alloc) REQUIRES_SHARED(Locks::mutator_lock_) {
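+ // Use std::partition rather than std::remove_if so that the removed methods
+ // remain valid at the tail of the vector and can be logged before erasing.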
+ auto kept_end = std::partition(
methods_.begin(),
methods_.end(),
- [&alloc](ArtMethod* method) { return alloc.ContainsUnsafe(method); });
+ [&alloc](ArtMethod* method) { return !alloc.ContainsUnsafe(method); });
+ for (auto it = kept_end; it != methods_.end(); it++) {
+ VLOG(jit) << "JIT removed (JNI) " << (*it)->PrettyMethod() << ": " << code_;
+ }
methods_.erase(kept_end, methods_.end());
}
- bool RemoveMethod(ArtMethod* method) {
+ bool RemoveMethod(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
auto it = std::find(methods_.begin(), methods_.end(), method);
if (it != methods_.end()) {
+ VLOG(jit) << "JIT removed (JNI) " << (*it)->PrettyMethod() << ": " << code_;
methods_.erase(it);
return true;
} else {
@@ -497,6 +501,26 @@
// We have potentially removed a lot of debug info. Do maintenance pass to save space.
RepackNativeDebugInfoForJit();
+
+ // Check that the set of compiled methods exactly matches native debug information.
+ if (kIsDebugBuild) {
+ std::map<const void*, ArtMethod*> compiled_methods;
+ VisitAllMethods([&](const void* addr, ArtMethod* method) {
+ CHECK(addr != nullptr && method != nullptr);
+ compiled_methods.emplace(addr, method);
+ });
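+ // Each debug-info symbol must be unique and refer to one of the compiled methods.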
+ std::set<const void*> debug_info;
+ ForEachNativeDebugSymbol([&](const void* addr, size_t, const char* name) {
+ addr = AlignDown(addr, GetInstructionSetInstructionAlignment(kRuntimeISA));  // Strip the Thumb-bit.
+ CHECK(debug_info.emplace(addr).second) << "Duplicate debug info: " << addr << " " << name;
+ CHECK_EQ(compiled_methods.count(addr), 1u) << "Extra debug info: " << addr << " " << name;
+ });
+ if (!debug_info.empty()) { // If debug-info generation is enabled.
+ for (auto it : compiled_methods) {
+ CHECK_EQ(debug_info.count(it.first), 1u) << "No debug info: " << it.second->PrettyMethod();
+ }
+ }
+ }
}
void JitCodeCache::RemoveMethodsIn(Thread* self, const LinearAlloc& alloc) {
@@ -525,6 +549,7 @@
for (auto it = method_code_map_.begin(); it != method_code_map_.end();) {
if (alloc.ContainsUnsafe(it->second)) {
method_headers.insert(OatQuickMethodHeader::FromCodePointer(it->first));
+ VLOG(jit) << "JIT removed " << it->second->PrettyMethod() << ": " << it->first;
it = method_code_map_.erase(it);
} else {
++it;
@@ -636,6 +661,8 @@
ArrayRef<const uint8_t> reserved_data,
const std::vector<Handle<mirror::Object>>& roots,
ArrayRef<const uint8_t> stack_map,
+ const std::vector<uint8_t>& debug_info,
+ bool is_full_debug_info,
bool osr,
bool has_should_deoptimize_flag,
const ArenaSet<ArtMethod*>& cha_single_implementation_list) {
@@ -669,6 +696,13 @@
number_of_compilations_++;
+ // We need to update the debug info before the entry point gets set.
+ // At the same time, we want to do it under the JIT lock so that debug info and JIT maps stay in sync.
+ if (!debug_info.empty()) {
+ // NB: Don't allow packing of full info since it would remove non-backtrace data.
+ AddNativeDebugInfoForJit(code_ptr, debug_info, /*allow_packing=*/ !is_full_debug_info);
+ }
+
// We need to update the entry point in the runnable state for the instrumentation.
{
// The following needs to be guarded by cha_lock_ also. Otherwise it's possible that the
@@ -811,6 +845,7 @@
if (release_memory) {
FreeCodeAndData(it->first);
}
+ VLOG(jit) << "JIT removed " << it->second->PrettyMethod() << ": " << it->first;
it = method_code_map_.erase(it);
} else {
++it;
@@ -1198,6 +1233,9 @@
++it;
} else {
method_headers.insert(OatQuickMethodHeader::FromCodePointer(data->GetCode()));
+ for (ArtMethod* method : data->GetMethods()) {
+ VLOG(jit) << "JIT removed (JNI) " << method->PrettyMethod() << ": " << data->GetCode();
+ }
it = jni_stubs_map_.erase(it);
}
}
@@ -1209,6 +1247,7 @@
} else {
OatQuickMethodHeader* header = OatQuickMethodHeader::FromCodePointer(code_ptr);
method_headers.insert(header);
+ VLOG(jit) << "JIT removed " << it->second->PrettyMethod() << ": " << it->first;
it = method_code_map_.erase(it);
}
}
@@ -1873,6 +1912,28 @@
return Runtime::Current()->IsZygote() ? &shared_region_ : &private_region_;
}
+void JitCodeCache::VisitAllMethods(const std::function<void(const void*, ArtMethod*)>& cb) {
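+ // JNI stubs: a single compiled stub may be shared by several methods.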
+ for (const auto& it : jni_stubs_map_) {
+ const JniStubData& data = it.second;
+ if (data.IsCompiled()) {
+ for (ArtMethod* method : data.GetMethods()) {
+ cb(data.GetCode(), method);
+ }
+ }
+ }
+ for (auto it : method_code_map_) { // Includes OSR methods.
+ cb(it.first, it.second);
+ }
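+ // Compiled code saved aside for possible reuse (e.g. after an entry point was reset).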
+ for (auto it : saved_compiled_methods_map_) {
+ cb(it.second, it.first);
+ }
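+ // Methods compiled by the zygote and shared with child processes.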
+ for (auto it : zygote_map_) {
+ if (it.code_ptr != nullptr && it.method != nullptr) {
+ cb(it.code_ptr, it.method);
+ }
+ }
+}
+
void ZygoteMap::Initialize(uint32_t number_of_methods) {
MutexLock mu(Thread::Current(), *Locks::jit_lock_);
// Allocate for 40-80% capacity. This will offer OK lookup times, and termination
diff --git a/runtime/jit/jit_code_cache.h b/runtime/jit/jit_code_cache.h
index 9ef1e4f..50e1e2b 100644
--- a/runtime/jit/jit_code_cache.h
+++ b/runtime/jit/jit_code_cache.h
@@ -266,6 +266,8 @@
ArrayRef<const uint8_t> reserved_data, // Uninitialized destination.
const std::vector<Handle<mirror::Object>>& roots,
ArrayRef<const uint8_t> stack_map, // Compiler output (source).
+ const std::vector<uint8_t>& debug_info,  // Native debug info to register (may be empty).
+ bool is_full_debug_info,  // Full debug info, as opposed to backtrace-only mini-debug-info.
bool osr,
bool has_should_deoptimize_flag,
const ArenaSet<ArtMethod*>& cha_single_implementation_list)
@@ -440,6 +442,10 @@
REQUIRES(Locks::jit_lock_)
REQUIRES(Locks::mutator_lock_);
+ // Calls the given callback for every compiled method in the code cache.
+ void VisitAllMethods(const std::function<void(const void*, ArtMethod*)>& cb)
+ REQUIRES(Locks::jit_lock_);
+
// Free code and data allocations for `code_ptr`.
void FreeCodeAndData(const void* code_ptr)
REQUIRES(Locks::jit_lock_)