path: root/compiler/optimizing/optimizing_compiler.cc
author David Sehr <sehr@google.com> 2016-10-13 09:12:37 -0700
committer David Sehr <sehr@google.com> 2016-10-18 14:10:04 -0700
commit 709b070044354d9f47641f273edacaeeb0240ab7 (patch)
tree 3a8ac051d7c35076303984d0d892cdd396b60427 /compiler/optimizing/optimizing_compiler.cc
parent 1a4de6a2453a3ad0310aca1a44e7e2d3b6f53bc1 (diff)
Remove mirror:: and ArtMethod deps in utils.{h,cc}
The latest chapter in the ongoing saga of attempting to dump a DEX file without having to start a whole runtime instance. This episode finds us removing references to ArtMethod/ArtField/mirror.

One aspect of this change that I would like to call out specifically is that the utils versions of the "Pretty*" functions were all written to accept nullptr as an argument. I have split these functions up as follows:
1) an instance method, such as PrettyClass, that obviously requires this != nullptr.
2) a static method that behaves the same way as the utils method, but calls the instance method if p != nullptr.
This requires using a full class qualifier for the static methods, which isn't exactly beautiful. I have tried to remove as many cases as possible where it was clear that p != nullptr.

Bug: 22322814
Test: test-art-host
Change-Id: I21adee3614aa697aa580cd1b86b72d9206e1cb24
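The instance/static split described above can be sketched roughly as follows. This is an illustrative stand-in class, not the actual ART declarations; the class name, member, and return value for the null case are made up for the example.

```cpp
#include <string>

// Illustrative stand-in for the "Pretty*" split described in the commit
// message; not the real ART class.
class Example {
 public:
  // 1) Instance method: assumes this != nullptr, analogous to
  //    mirror::Class::PrettyClass() or DexFile::PrettyMethod().
  std::string PrettyExample() const {
    return descriptor_;
  }

  // 2) Static method: behaves like the old nullptr-tolerant utils helper,
  //    delegating to the instance method only when the pointer is non-null.
  static std::string PrettyExample(Example* e) {
    return (e != nullptr) ? e->PrettyExample() : "null";
  }

 private:
  std::string descriptor_ = "LExample;";  // placeholder data for the sketch
};
```

With this shape, a call site that may hold a null pointer writes Example::PrettyExample(maybe_null), using the full class qualifier noted in the message, while call sites that know the pointer is non-null can use the plain instance call, as the hunks below do for DexFile::PrettyMethod.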
Diffstat (limited to 'compiler/optimizing/optimizing_compiler.cc')
-rw-r--r-- compiler/optimizing/optimizing_compiler.cc | 8
1 file changed, 4 insertions, 4 deletions
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index 4370a84bd2..6ba0963720 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -173,7 +173,7 @@ class PassObserver : public ValueObject {
const char* GetMethodName() {
// PrettyMethod() is expensive, so we delay calling it until we actually have to.
if (cached_method_name_.empty()) {
- cached_method_name_ = PrettyMethod(graph_->GetMethodIdx(), graph_->GetDexFile());
+ cached_method_name_ = graph_->GetDexFile().PrettyMethod(graph_->GetMethodIdx());
}
return cached_method_name_.c_str();
}
@@ -1044,7 +1044,7 @@ CompiledMethod* OptimizingCompiler::Compile(const DexFile::CodeItem* code_item,
if (kArenaAllocatorCountAllocations) {
if (arena.BytesAllocated() > kArenaAllocatorMemoryReportThreshold) {
MemStats mem_stats(arena.GetMemStats());
- LOG(INFO) << PrettyMethod(method_idx, dex_file) << " " << Dumpable<MemStats>(mem_stats);
+ LOG(INFO) << dex_file.PrettyMethod(method_idx) << " " << Dumpable<MemStats>(mem_stats);
}
}
}
@@ -1066,7 +1066,7 @@ CompiledMethod* OptimizingCompiler::Compile(const DexFile::CodeItem* code_item,
// instruction set is supported -- and has support for read
// barriers, if they are enabled). This makes sure we're not
// regressing.
- std::string method_name = PrettyMethod(method_idx, dex_file);
+ std::string method_name = dex_file.PrettyMethod(method_idx);
bool shouldCompile = method_name.find("$opt$") != std::string::npos;
DCHECK((method != nullptr) || !shouldCompile) << "Didn't compile " << method_name;
}
@@ -1131,7 +1131,7 @@ bool OptimizingCompiler::JitCompile(Thread* self,
if (kArenaAllocatorCountAllocations) {
if (arena.BytesAllocated() > kArenaAllocatorMemoryReportThreshold) {
MemStats mem_stats(arena.GetMemStats());
- LOG(INFO) << PrettyMethod(method_idx, *dex_file) << " " << Dumpable<MemStats>(mem_stats);
+ LOG(INFO) << dex_file->PrettyMethod(method_idx) << " " << Dumpable<MemStats>(mem_stats);
}
}
}