diff options
author    2015-06-19 14:47:01 +0100
committer 2015-06-22 11:57:36 +0100
commit    eb7b7399dbdb5e471b8ae00a567bf4f19edd3907 (patch)
tree      a0a488040883c14cc6cbec3186654137459d44ff /compiler/optimizing/code_generator.cc
parent    8ec0e20347e13592539a8c0786b1db1735149800 (diff)
Opt compiler: Add disassembly to the '.cfg' output.
This is automatically added to the '.cfg' output when using the usual
`--dump-cfg` option.
Change-Id: I864bfc3a8299c042e72e451cc7730ad8271e4deb
Diffstat (limited to 'compiler/optimizing/code_generator.cc')

-rw-r--r--  compiler/optimizing/code_generator.cc  50

1 file changed, 46 insertions(+), 4 deletions(-)
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index 64f2c9a986..cd10935806 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -25,6 +25,7 @@
 #include "dex/verified_method.h"
 #include "driver/dex_compilation_unit.h"
 #include "gc_map_builder.h"
+#include "graph_visualizer.h"
 #include "leb128.h"
 #include "mapping_table.h"
 #include "mirror/array-inl.h"
@@ -159,12 +160,55 @@ HBasicBlock* CodeGenerator::FirstNonEmptyBlock(HBasicBlock* block) const {
   return block;
 }
 
+class DisassemblyScope {
+ public:
+  DisassemblyScope(HInstruction* instruction, const CodeGenerator& codegen)
+      : codegen_(codegen), instruction_(instruction), start_offset_(static_cast<size_t>(-1)) {
+    if (codegen_.GetDisassemblyInformation() != nullptr) {
+      start_offset_ = codegen_.GetAssembler().CodeSize();
+    }
+  }
+
+  ~DisassemblyScope() {
+    // We avoid building this data when we know it will not be used.
+    if (codegen_.GetDisassemblyInformation() != nullptr) {
+      codegen_.GetDisassemblyInformation()->AddInstructionInterval(
+          instruction_, start_offset_, codegen_.GetAssembler().CodeSize());
+    }
+  }
+
+ private:
+  const CodeGenerator& codegen_;
+  HInstruction* instruction_;
+  size_t start_offset_;
+};
+
+
+void CodeGenerator::GenerateSlowPaths() {
+  size_t code_start = 0;
+  for (size_t i = 0, e = slow_paths_.Size(); i < e; ++i) {
+    if (disasm_info_ != nullptr) {
+      code_start = GetAssembler()->CodeSize();
+    }
+    slow_paths_.Get(i)->EmitNativeCode(this);
+    if (disasm_info_ != nullptr) {
+      disasm_info_->AddSlowPathInterval(slow_paths_.Get(i), code_start, GetAssembler()->CodeSize());
+    }
+  }
+}
+
 void CodeGenerator::CompileInternal(CodeAllocator* allocator, bool is_baseline) {
   is_baseline_ = is_baseline;
   HGraphVisitor* instruction_visitor = GetInstructionVisitor();
   DCHECK_EQ(current_block_index_, 0u);
+
+  size_t frame_start = GetAssembler()->CodeSize();
   GenerateFrameEntry();
   DCHECK_EQ(GetAssembler()->cfi().GetCurrentCFAOffset(), static_cast<int>(frame_size_));
+  if (disasm_info_ != nullptr) {
+    disasm_info_->SetFrameEntryInterval(frame_start, GetAssembler()->CodeSize());
+  }
+
   for (size_t e = block_order_->Size(); current_block_index_ < e; ++current_block_index_) {
     HBasicBlock* block = block_order_->Get(current_block_index_);
     // Don't generate code for an empty block. Its predecessors will branch to its successor
@@ -174,6 +218,7 @@ void CodeGenerator::CompileInternal(CodeAllocator* allocator, bool is_baseline)
     Bind(block);
     for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
       HInstruction* current = it.Current();
+      DisassemblyScope disassembly_scope(current, *this);
       if (is_baseline) {
         InitLocationsBaseline(current);
       }
@@ -182,10 +227,7 @@ void CodeGenerator::CompileInternal(CodeAllocator* allocator, bool is_baseline)
     }
   }
 
-  // Generate the slow paths.
-  for (size_t i = 0, e = slow_paths_.Size(); i < e; ++i) {
-    slow_paths_.Get(i)->EmitNativeCode(this);
-  }
+  GenerateSlowPaths();
 
   // Finalize instructions in assember;
   Finalize(allocator);