Remove mirror:: and ArtMethod deps in utils.{h,cc}
The latest chapter in the ongoing saga of attempting to dump a DEX
file without having to start a whole runtime instance. This episode
finds us removing references to ArtMethod/ArtField/mirror.
One aspect of this change that I would like to call out specifically
is that the utils versions of the "Pretty*" functions were all
written to accept nullptr as an argument. I have split these
functions up as follows:
1) an instance method, such as PrettyClass, which obviously requires
this != nullptr.
2) a static method that behaves the same way as the utils function,
but calls through to the instance method only if p != nullptr.
This requires using a full class qualifier for the static methods,
which isn't exactly beautiful. I have tried to switch call sites to
the instance method wherever it was clear that p != nullptr.
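For illustration, the split looks roughly like the sketch below,
using PrettyMethod as the example. This is a simplification, not the
actual declarations: the real methods carry
REQUIRES_SHARED(Locks::mutator_lock_) annotations, and the "null"
fallback string is assumed from the old utils behavior.

  #include <string>

  class ArtMethod {
   public:
    // Instance form: callers must guarantee this != nullptr. The
    // real implementation formats "return-type Class.name(params)"
    // from the dex file data.
    std::string PrettyMethod(bool with_signature = true);

    // Static form: tolerates nullptr the way the old utils free
    // function did, otherwise dispatches to the instance method.
    static std::string PrettyMethod(ArtMethod* m,
                                    bool with_signature = true) {
      return m != nullptr ? m->PrettyMethod(with_signature) : "null";
    }
  };

In the diff below, call sites where the pointer is provably non-null
use the instance form (m->PrettyMethod()); the rest go through the
fully qualified static form (ArtMethod::PrettyMethod(m)).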
Bug: 22322814
Test: test-art-host
Change-Id: I21adee3614aa697aa580cd1b86b72d9206e1cb24
diff --git a/runtime/debugger.cc b/runtime/debugger.cc
index 3977e49..23fb79d 100644
--- a/runtime/debugger.cc
+++ b/runtime/debugger.cc
@@ -134,7 +134,8 @@
static std::ostream& operator<<(std::ostream& os, const Breakpoint& rhs)
REQUIRES_SHARED(Locks::mutator_lock_) {
- os << StringPrintf("Breakpoint[%s @%#x]", PrettyMethod(rhs.Method()).c_str(), rhs.DexPc());
+ os << StringPrintf("Breakpoint[%s @%#x]", ArtMethod::PrettyMethod(rhs.Method()).c_str(),
+ rhs.DexPc());
return os;
}
@@ -190,7 +191,7 @@
ArtMethod* method, uint32_t dex_pc)
OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
// We're not recorded to listen to this kind of event, so complain.
- LOG(ERROR) << "Unexpected method unwind event in debugger " << PrettyMethod(method)
+ LOG(ERROR) << "Unexpected method unwind event in debugger " << ArtMethod::PrettyMethod(method)
<< " " << dex_pc;
}
@@ -236,7 +237,7 @@
// We only care about branches in the Jit.
void Branch(Thread* /*thread*/, ArtMethod* method, uint32_t dex_pc, int32_t dex_pc_offset)
OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
- LOG(ERROR) << "Unexpected branch event in debugger " << PrettyMethod(method)
+ LOG(ERROR) << "Unexpected branch event in debugger " << ArtMethod::PrettyMethod(method)
<< " " << dex_pc << ", " << dex_pc_offset;
}
@@ -247,7 +248,7 @@
uint32_t dex_pc,
ArtMethod*)
OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
- LOG(ERROR) << "Unexpected invoke event in debugger " << PrettyMethod(method)
+ LOG(ERROR) << "Unexpected invoke event in debugger " << ArtMethod::PrettyMethod(method)
<< " " << dex_pc;
}
@@ -1301,7 +1302,7 @@
if (new_object == nullptr) {
DCHECK(self->IsExceptionPending());
self->ClearException();
- LOG(ERROR) << "Could not allocate object of type " << PrettyDescriptor(c);
+ LOG(ERROR) << "Could not allocate object of type " << mirror::Class::PrettyDescriptor(c);
*new_object_id = 0;
return JDWP::ERR_OUT_OF_MEMORY;
}
@@ -1328,7 +1329,7 @@
if (new_array == nullptr) {
DCHECK(self->IsExceptionPending());
self->ClearException();
- LOG(ERROR) << "Could not allocate array of type " << PrettyDescriptor(c);
+ LOG(ERROR) << "Could not allocate array of type " << mirror::Class::PrettyDescriptor(c);
*new_array_id = 0;
return JDWP::ERR_OUT_OF_MEMORY;
}
@@ -1449,7 +1450,7 @@
if (code_item == nullptr) {
// We should not get here for a method without code (native, proxy or abstract). Log it and
// return the slot as is since all registers are arguments.
- LOG(WARNING) << "Trying to mangle slot for method without code " << PrettyMethod(m);
+ LOG(WARNING) << "Trying to mangle slot for method without code " << m->PrettyMethod();
return slot;
}
uint16_t ins_size = code_item->ins_size_;
@@ -1480,7 +1481,8 @@
if (code_item == nullptr) {
// We should not get here for a method without code (native, proxy or abstract). Log it and
// return the slot as is since all registers are arguments.
- LOG(WARNING) << "Trying to demangle slot for method without code " << PrettyMethod(m);
+ LOG(WARNING) << "Trying to demangle slot for method without code "
+ << m->PrettyMethod();
uint16_t vreg_count = GetMethodNumArgRegistersIncludingThis(m);
if (slot < vreg_count) {
*error = JDWP::ERR_NONE;
@@ -1496,7 +1498,7 @@
}
// Slot is invalid in the method.
- LOG(ERROR) << "Invalid local slot " << slot << " for method " << PrettyMethod(m);
+ LOG(ERROR) << "Invalid local slot " << slot << " for method " << m->PrettyMethod();
*error = JDWP::ERR_INVALID_SLOT;
return DexFile::kDexNoIndex16;
}
@@ -1784,14 +1786,16 @@
// TODO: should we give up now if receiver_class is null?
if (receiver_class != nullptr && !f->GetDeclaringClass()->IsAssignableFrom(receiver_class)) {
- LOG(INFO) << "ERR_INVALID_FIELDID: " << PrettyField(f) << " " << PrettyClass(receiver_class);
+ LOG(INFO) << "ERR_INVALID_FIELDID: " << f->PrettyField() << " "
+ << receiver_class->PrettyClass();
return JDWP::ERR_INVALID_FIELDID;
}
// Ensure the field's class is initialized.
Handle<mirror::Class> klass(hs.NewHandle(f->GetDeclaringClass()));
if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, klass, true, false)) {
- LOG(WARNING) << "Not able to initialize class for SetValues: " << PrettyClass(klass.Get());
+ LOG(WARNING) << "Not able to initialize class for SetValues: "
+ << mirror::Class::PrettyClass(klass.Get());
}
// The RI only enforces the static/non-static mismatch in one direction.
@@ -1803,7 +1807,7 @@
} else {
if (f->IsStatic()) {
LOG(WARNING) << "Ignoring non-nullptr receiver for ObjectReference.GetValues"
- << " on static field " << PrettyField(f);
+ << " on static field " << f->PrettyField();
}
}
if (f->IsStatic()) {
@@ -1912,7 +1916,8 @@
// Ensure the field's class is initialized.
Handle<mirror::Class> klass(hs.NewHandle(f->GetDeclaringClass()));
if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, klass, true, false)) {
- LOG(WARNING) << "Not able to initialize class for SetValues: " << PrettyClass(klass.Get());
+ LOG(WARNING) << "Not able to initialize class for SetValues: "
+ << mirror::Class::PrettyClass(klass.Get());
}
// The RI only enforces the static/non-static mismatch in one direction.
@@ -1924,7 +1929,7 @@
} else {
if (f->IsStatic()) {
LOG(WARNING) << "Ignoring non-nullptr receiver for ObjectReference.SetValues"
- << " on static field " << PrettyField(f);
+ << " on static field " << f->PrettyField();
}
}
if (f->IsStatic()) {
@@ -2581,7 +2586,7 @@
static std::string GetStackContextAsString(const StackVisitor& visitor)
REQUIRES_SHARED(Locks::mutator_lock_) {
return StringPrintf(" at DEX pc 0x%08x in method %s", visitor.GetDexPc(false),
- PrettyMethod(visitor.GetMethod()).c_str());
+ ArtMethod::PrettyMethod(visitor.GetMethod()).c_str());
}
static JDWP::JdwpError FailGetLocalValue(const StackVisitor& visitor, uint16_t vreg,
@@ -3150,14 +3155,14 @@
VLOG(jdwp) << "Undeoptimize the world DONE";
break;
case DeoptimizationRequest::kSelectiveDeoptimization:
- VLOG(jdwp) << "Deoptimize method " << PrettyMethod(request.Method()) << " ...";
+ VLOG(jdwp) << "Deoptimize method " << ArtMethod::PrettyMethod(request.Method()) << " ...";
instrumentation->Deoptimize(request.Method());
- VLOG(jdwp) << "Deoptimize method " << PrettyMethod(request.Method()) << " DONE";
+ VLOG(jdwp) << "Deoptimize method " << ArtMethod::PrettyMethod(request.Method()) << " DONE";
break;
case DeoptimizationRequest::kSelectiveUndeoptimization:
- VLOG(jdwp) << "Undeoptimize method " << PrettyMethod(request.Method()) << " ...";
+ VLOG(jdwp) << "Undeoptimize method " << ArtMethod::PrettyMethod(request.Method()) << " ...";
instrumentation->Undeoptimize(request.Method());
- VLOG(jdwp) << "Undeoptimize method " << PrettyMethod(request.Method()) << " DONE";
+ VLOG(jdwp) << "Undeoptimize method " << ArtMethod::PrettyMethod(request.Method()) << " DONE";
break;
default:
LOG(FATAL) << "Unsupported deoptimization request kind " << request.GetKind();
@@ -3226,14 +3231,14 @@
case DeoptimizationRequest::kSelectiveDeoptimization: {
DCHECK(req.Method() != nullptr);
VLOG(jdwp) << "Queue request #" << deoptimization_requests_.size()
- << " for deoptimization of " << PrettyMethod(req.Method());
+ << " for deoptimization of " << req.Method()->PrettyMethod();
deoptimization_requests_.push_back(req);
break;
}
case DeoptimizationRequest::kSelectiveUndeoptimization: {
DCHECK(req.Method() != nullptr);
VLOG(jdwp) << "Queue request #" << deoptimization_requests_.size()
- << " for undeoptimization of " << PrettyMethod(req.Method());
+ << " for undeoptimization of " << req.Method()->PrettyMethod();
deoptimization_requests_.push_back(req);
break;
}
@@ -3325,7 +3330,7 @@
if (!Dbg::RequiresDeoptimization()) {
// We already run in interpreter-only mode so we don't need to deoptimize anything.
VLOG(jdwp) << "No need for deoptimization when fully running with interpreter for method "
- << PrettyMethod(m);
+ << ArtMethod::PrettyMethod(m);
return DeoptimizationRequest::kNothing;
}
const Breakpoint* first_breakpoint;
@@ -3344,17 +3349,19 @@
bool need_full_deoptimization = m->IsDefault();
if (need_full_deoptimization) {
VLOG(jdwp) << "Need full deoptimization because of copying of method "
- << PrettyMethod(m);
+ << ArtMethod::PrettyMethod(m);
return DeoptimizationRequest::kFullDeoptimization;
} else {
// We don't need to deoptimize if the method has not been compiled.
const bool is_compiled = m->HasAnyCompiledCode();
if (is_compiled) {
- VLOG(jdwp) << "Need selective deoptimization for compiled method " << PrettyMethod(m);
+ VLOG(jdwp) << "Need selective deoptimization for compiled method "
+ << ArtMethod::PrettyMethod(m);
return DeoptimizationRequest::kSelectiveDeoptimization;
} else {
// Method is not compiled: we don't need to deoptimize.
- VLOG(jdwp) << "No need for deoptimization for non-compiled method " << PrettyMethod(m);
+ VLOG(jdwp) << "No need for deoptimization for non-compiled method "
+ << ArtMethod::PrettyMethod(m);
return DeoptimizationRequest::kNothing;
}
}
@@ -3584,7 +3591,8 @@
bool VisitFrame() OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
// The visitor is meant to be used when handling exception from compiled code only.
- CHECK(!IsShadowFrame()) << "We only expect to visit compiled frame: " << PrettyMethod(GetMethod());
+ CHECK(!IsShadowFrame()) << "We only expect to visit compiled frame: "
+ << ArtMethod::PrettyMethod(GetMethod());
ArtMethod* method = GetMethod();
if (method == nullptr) {
// We reach an upcall and don't need to deoptimize this part of the stack (ManagedFragment)
@@ -3810,7 +3818,8 @@
VLOG(jdwp) << "Single-step thread: " << *thread;
VLOG(jdwp) << "Single-step step size: " << single_step_control->GetStepSize();
VLOG(jdwp) << "Single-step step depth: " << single_step_control->GetStepDepth();
- VLOG(jdwp) << "Single-step current method: " << PrettyMethod(single_step_control->GetMethod());
+ VLOG(jdwp) << "Single-step current method: "
+ << ArtMethod::PrettyMethod(single_step_control->GetMethod());
VLOG(jdwp) << "Single-step current line: " << line_number;
VLOG(jdwp) << "Single-step current stack depth: " << single_step_control->GetStackDepth();
VLOG(jdwp) << "Single-step dex_pc values:";
@@ -4066,12 +4075,12 @@
ArtMethod* actual_method =
pReq->klass.Read()->FindVirtualMethodForVirtualOrInterface(m, image_pointer_size);
if (actual_method != m) {
- VLOG(jdwp) << "ExecuteMethod translated " << PrettyMethod(m)
- << " to " << PrettyMethod(actual_method);
+ VLOG(jdwp) << "ExecuteMethod translated " << ArtMethod::PrettyMethod(m)
+ << " to " << ArtMethod::PrettyMethod(actual_method);
m = actual_method;
}
}
- VLOG(jdwp) << "ExecuteMethod " << PrettyMethod(m)
+ VLOG(jdwp) << "ExecuteMethod " << ArtMethod::PrettyMethod(m)
<< " receiver=" << pReq->receiver.Read()
<< " arg_count=" << pReq->arg_count;
CHECK(m != nullptr);
@@ -4873,12 +4882,13 @@
const gc::AllocRecord* record = &it->second;
LOG(INFO) << StringPrintf(" Thread %-2d %6zd bytes ", record->GetTid(), record->ByteCount())
- << PrettyClass(record->GetClass());
+ << mirror::Class::PrettyClass(record->GetClass());
for (size_t stack_frame = 0, depth = record->GetDepth(); stack_frame < depth; ++stack_frame) {
const gc::AllocRecordStackTraceElement& stack_element = record->StackElement(stack_frame);
ArtMethod* m = stack_element.GetMethod();
- LOG(INFO) << " " << PrettyMethod(m) << " line " << stack_element.ComputeLineNumber();
+ LOG(INFO) << " " << ArtMethod::PrettyMethod(m) << " line "
+ << stack_element.ComputeLineNumber();
}
// pause periodically to help logcat catch up