author | 2017-12-15 11:19:33 -0800
committer | 2017-12-22 09:41:17 -0800
commit | 808c7a57bb913b13c22884f57cdacd59bf1fdb3f (patch)
tree | d7f0d7cabaac5a7646c25bae584a82a9aa279cc0 /compiler/optimizing
parent | 64bae9fb677aa0e2406d13ea9f8ebaa92e16f978 (diff)
Make CodeItem fields private
Make code item fields private and use accessors. Added a handful of
friend classes to reduce the size of the change.
Changed the default to be nullable and removed CreateNullable.
CreateNullable was a bad API since it defaulted to the unsafe variant;
a CreateNonNullable may be added later if it is important for performance.
Motivation:
Have a different layout for code items in cdex.
Bug: 63756964
Test: test-art-host-gtest
Test: test/testrunner/testrunner.py --host
Test: art/tools/run-jdwp-tests.sh '--mode=host' '--variant=X32' --debug
Change-Id: I42bc7435e20358682075cb6de52713b595f95bf9
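For readers skimming the diff below: the patch replaces direct reads of the now-private CodeItem fields with the CodeItem*Accessor classes. The sketch below is a minimal illustration of that pattern pieced together from the hunks in this change, not an excerpt from the tree; `DumpCodeItemStats` is a hypothetical helper, and the exact headers and null-handling are assumptions based on the accessors used in the diff.

```cpp
// Illustration only -- DumpCodeItemStats is a hypothetical helper, not part of this patch.
#include "code_item_accessors-inl.h"
#include "dex_file.h"

namespace art {

uint32_t DumpCodeItemStats(const DexFile* dex_file, const DexFile::CodeItem* code_item) {
  // Old style (no longer compiles once the fields are private):
  //   uint32_t code_units = code_item->insns_size_in_code_units_;
  //   uint16_t tries      = code_item->tries_size_;
  CodeItemDataAccessor accessor(dex_file, code_item);  // code_item may be null (nullable default).
  if (!accessor.HasCodeItem()) {
    return 0u;  // e.g. a native/abstract method, or the intrinsic-graph case in the builders.
  }
  uint32_t code_units = accessor.InsnsSizeInCodeUnits();  // was insns_size_in_code_units_
  uint32_t tries = accessor.TriesSize();                  // was tries_size_
  // Instruction iteration also goes through the accessor instead of code_item->Instructions().
  uint32_t instruction_count = 0u;
  for (const DexInstructionPcPair& pair : accessor) {
    static_cast<void>(pair.Inst());
    ++instruction_count;
  }
  return code_units + tries + instruction_count;
}

}  // namespace art
```

The diff uses three accessor flavors: CodeItemInstructionAccessor for the instruction-size check in optimizing_compiler.cc, CodeItemDataAccessor where try items and register counts are needed, and CodeItemDebugInfoAccessor where debug positions are decoded; a default-constructed CodeItemDebugInfoAccessor() stands in for the old `/* code_item */ nullptr` in the intrinsic-graph paths.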
Diffstat (limited to 'compiler/optimizing')
-rw-r--r-- | compiler/optimizing/block_builder.cc | 71
-rw-r--r-- | compiler/optimizing/block_builder.h | 21
-rw-r--r-- | compiler/optimizing/builder.cc | 54
-rw-r--r-- | compiler/optimizing/builder.h | 20
-rw-r--r-- | compiler/optimizing/inliner.cc | 16
-rw-r--r-- | compiler/optimizing/instruction_builder.cc | 58
-rw-r--r-- | compiler/optimizing/instruction_builder.h | 30
-rw-r--r-- | compiler/optimizing/optimization.cc | 6
-rw-r--r-- | compiler/optimizing/optimizing_compiler.cc | 8
-rw-r--r-- | compiler/optimizing/optimizing_unit_test.h | 4
10 files changed, 165 insertions, 123 deletions
diff --git a/compiler/optimizing/block_builder.cc b/compiler/optimizing/block_builder.cc index 58f591bd1e..c505efafe2 100644 --- a/compiler/optimizing/block_builder.cc +++ b/compiler/optimizing/block_builder.cc @@ -18,10 +18,31 @@ #include "base/logging.h" // FOR VLOG. #include "bytecode_utils.h" +#include "code_item_accessors-inl.h" #include "quicken_info.h" namespace art { +HBasicBlockBuilder::HBasicBlockBuilder(HGraph* graph, + const DexFile* const dex_file, + const CodeItemDebugInfoAccessor& accessor, + ScopedArenaAllocator* local_allocator) + : allocator_(graph->GetAllocator()), + graph_(graph), + dex_file_(dex_file), + code_item_accessor_(accessor), + local_allocator_(local_allocator), + branch_targets_(code_item_accessor_.HasCodeItem() + ? code_item_accessor_.InsnsSizeInCodeUnits() + : /* fake dex_pc=0 for intrinsic graph */ 1u, + nullptr, + local_allocator->Adapter(kArenaAllocGraphBuilder)), + throwing_blocks_(kDefaultNumberOfThrowingBlocks, + local_allocator->Adapter(kArenaAllocGraphBuilder)), + number_of_branches_(0u), + quicken_index_for_dex_pc_(std::less<uint32_t>(), + local_allocator->Adapter(kArenaAllocGraphBuilder)) {} + HBasicBlock* HBasicBlockBuilder::MaybeCreateBlockAt(uint32_t dex_pc) { return MaybeCreateBlockAt(dex_pc, dex_pc); } @@ -41,20 +62,19 @@ bool HBasicBlockBuilder::CreateBranchTargets() { // Create the first block for the dex instructions, single successor of the entry block. MaybeCreateBlockAt(0u); - if (code_item_->tries_size_ != 0) { + if (code_item_accessor_.TriesSize() != 0) { // Create branch targets at the start/end of the TryItem range. These are // places where the program might fall through into/out of the a block and // where TryBoundary instructions will be inserted later. Other edges which // enter/exit the try blocks are a result of branches/switches. - for (size_t idx = 0; idx < code_item_->tries_size_; ++idx) { - const DexFile::TryItem* try_item = DexFile::GetTryItems(*code_item_, idx); - uint32_t dex_pc_start = try_item->start_addr_; - uint32_t dex_pc_end = dex_pc_start + try_item->insn_count_; + for (const DexFile::TryItem& try_item : code_item_accessor_.TryItems()) { + uint32_t dex_pc_start = try_item.start_addr_; + uint32_t dex_pc_end = dex_pc_start + try_item.insn_count_; MaybeCreateBlockAt(dex_pc_start); - if (dex_pc_end < code_item_->insns_size_in_code_units_) { + if (dex_pc_end < code_item_accessor_.InsnsSizeInCodeUnits()) { // TODO: Do not create block if the last instruction cannot fall through. MaybeCreateBlockAt(dex_pc_end); - } else if (dex_pc_end == code_item_->insns_size_in_code_units_) { + } else if (dex_pc_end == code_item_accessor_.InsnsSizeInCodeUnits()) { // The TryItem spans until the very end of the CodeItem and therefore // cannot have any code afterwards. } else { @@ -65,7 +85,7 @@ bool HBasicBlockBuilder::CreateBranchTargets() { } // Create branch targets for exception handlers. - const uint8_t* handlers_ptr = DexFile::GetCatchHandlerData(*code_item_, 0); + const uint8_t* handlers_ptr = code_item_accessor_.GetCatchHandlerData(); uint32_t handlers_size = DecodeUnsignedLeb128(&handlers_ptr); for (uint32_t idx = 0; idx < handlers_size; ++idx) { CatchHandlerIterator iterator(handlers_ptr); @@ -78,8 +98,7 @@ bool HBasicBlockBuilder::CreateBranchTargets() { // Iterate over all instructions and find branching instructions. Create blocks for // the locations these instructions branch to. 
- IterationRange<DexInstructionIterator> instructions = code_item_->Instructions(); - for (const DexInstructionPcPair& pair : instructions) { + for (const DexInstructionPcPair& pair : code_item_accessor_) { const uint32_t dex_pc = pair.DexPc(); const Instruction& instruction = pair.Inst(); @@ -109,7 +128,7 @@ bool HBasicBlockBuilder::CreateBranchTargets() { if (instruction.CanFlowThrough()) { DexInstructionIterator next(std::next(DexInstructionIterator(pair))); - if (next == instructions.end()) { + if (next == code_item_accessor_.end()) { // In the normal case we should never hit this but someone can artificially forge a dex // file to fall-through out the method code. In this case we bail out compilation. VLOG(compiler) << "Not compiled: Fall-through beyond the CodeItem"; @@ -130,7 +149,7 @@ void HBasicBlockBuilder::ConnectBasicBlocks() { bool is_throwing_block = false; // Calculate the qucikening index here instead of CreateBranchTargets since it's easier to // calculate in dex_pc order. - for (const DexInstructionPcPair& pair : code_item_->Instructions()) { + for (const DexInstructionPcPair& pair : code_item_accessor_) { const uint32_t dex_pc = pair.DexPc(); const Instruction& instruction = pair.Inst(); @@ -213,10 +232,12 @@ static const DexFile::TryItem* GetTryItem( // successors matches the order in which runtime exception delivery searches // for a handler. static void LinkToCatchBlocks(HTryBoundary* try_boundary, - const DexFile::CodeItem& code_item, + const CodeItemDataAccessor& accessor, const DexFile::TryItem* try_item, const ScopedArenaSafeMap<uint32_t, HBasicBlock*>& catch_blocks) { - for (CatchHandlerIterator it(code_item, *try_item); it.HasNext(); it.Next()) { + for (CatchHandlerIterator it(accessor.GetCatchHandlerData(try_item->handler_off_)); + it.HasNext(); + it.Next()) { try_boundary->AddExceptionHandler(catch_blocks.Get(it.GetHandlerAddress())); } } @@ -232,7 +253,7 @@ bool HBasicBlockBuilder::MightHaveLiveNormalPredecessors(HBasicBlock* catch_bloc } } - const Instruction& first = code_item_->InstructionAt(catch_block->GetDexPc()); + const Instruction& first = code_item_accessor_.InstructionAt(catch_block->GetDexPc()); if (first.Opcode() == Instruction::MOVE_EXCEPTION) { // Verifier guarantees that if a catch block begins with MOVE_EXCEPTION then // it has no live normal predecessors. @@ -250,7 +271,7 @@ bool HBasicBlockBuilder::MightHaveLiveNormalPredecessors(HBasicBlock* catch_bloc } void HBasicBlockBuilder::InsertTryBoundaryBlocks() { - if (code_item_->tries_size_ == 0) { + if (code_item_accessor_.TriesSize() == 0) { return; } @@ -272,12 +293,10 @@ void HBasicBlockBuilder::InsertTryBoundaryBlocks() { // loop for synchronized blocks. if (ContainsElement(throwing_blocks_, block)) { // Try to find a TryItem covering the block. - const int32_t try_item_idx = DexFile::FindTryItem(DexFile::GetTryItems(*code_item_, 0u), - code_item_->tries_size_, - block->GetDexPc()); - if (try_item_idx != -1) { + const DexFile::TryItem* try_item = code_item_accessor_.FindTryItem(block->GetDexPc()); + if (try_item != nullptr) { // Block throwing and in a TryItem. Store the try block information. - try_block_info.Put(block->GetBlockId(), DexFile::GetTryItems(*code_item_, try_item_idx)); + try_block_info.Put(block->GetBlockId(), try_item); } } } @@ -288,7 +307,7 @@ void HBasicBlockBuilder::InsertTryBoundaryBlocks() { // Iterate over catch blocks, create artifical landing pads if necessary to // simplify the CFG, and set metadata. 
- const uint8_t* handlers_ptr = DexFile::GetCatchHandlerData(*code_item_, 0); + const uint8_t* handlers_ptr = code_item_accessor_.GetCatchHandlerData(); uint32_t handlers_size = DecodeUnsignedLeb128(&handlers_ptr); for (uint32_t idx = 0; idx < handlers_size; ++idx) { CatchHandlerIterator iterator(handlers_ptr); @@ -336,7 +355,7 @@ void HBasicBlockBuilder::InsertTryBoundaryBlocks() { HTryBoundary* try_entry = new (allocator_) HTryBoundary( HTryBoundary::BoundaryKind::kEntry, try_block->GetDexPc()); try_block->CreateImmediateDominator()->AddInstruction(try_entry); - LinkToCatchBlocks(try_entry, *code_item_, try_item, catch_blocks); + LinkToCatchBlocks(try_entry, code_item_accessor_, try_item, catch_blocks); break; } } @@ -364,13 +383,13 @@ void HBasicBlockBuilder::InsertTryBoundaryBlocks() { HTryBoundary* try_exit = new (allocator_) HTryBoundary(HTryBoundary::BoundaryKind::kExit, successor->GetDexPc()); graph_->SplitEdge(try_block, successor)->AddInstruction(try_exit); - LinkToCatchBlocks(try_exit, *code_item_, try_item, catch_blocks); + LinkToCatchBlocks(try_exit, code_item_accessor_, try_item, catch_blocks); } } } bool HBasicBlockBuilder::Build() { - DCHECK(code_item_ != nullptr); + DCHECK(code_item_accessor_.HasCodeItem()); DCHECK(graph_->GetBlocks().empty()); graph_->SetEntryBlock(new (allocator_) HBasicBlock(graph_, kNoDexPc)); @@ -388,7 +407,7 @@ bool HBasicBlockBuilder::Build() { } void HBasicBlockBuilder::BuildIntrinsic() { - DCHECK(code_item_ == nullptr); + DCHECK(!code_item_accessor_.HasCodeItem()); DCHECK(graph_->GetBlocks().empty()); // Create blocks. diff --git a/compiler/optimizing/block_builder.h b/compiler/optimizing/block_builder.h index 7d0f56db34..e68b95c46f 100644 --- a/compiler/optimizing/block_builder.h +++ b/compiler/optimizing/block_builder.h @@ -19,6 +19,7 @@ #include "base/scoped_arena_allocator.h" #include "base/scoped_arena_containers.h" +#include "code_item_accessors.h" #include "dex_file.h" #include "nodes.h" @@ -28,22 +29,8 @@ class HBasicBlockBuilder : public ValueObject { public: HBasicBlockBuilder(HGraph* graph, const DexFile* const dex_file, - const DexFile::CodeItem* code_item, - ScopedArenaAllocator* local_allocator) - : allocator_(graph->GetAllocator()), - graph_(graph), - dex_file_(dex_file), - code_item_(code_item), - local_allocator_(local_allocator), - branch_targets_(code_item != nullptr ? code_item->insns_size_in_code_units_ - : /* fake dex_pc=0 for intrinsic graph */ 1u, - nullptr, - local_allocator->Adapter(kArenaAllocGraphBuilder)), - throwing_blocks_(kDefaultNumberOfThrowingBlocks, - local_allocator->Adapter(kArenaAllocGraphBuilder)), - number_of_branches_(0u), - quicken_index_for_dex_pc_(std::less<uint32_t>(), - local_allocator->Adapter(kArenaAllocGraphBuilder)) {} + const CodeItemDebugInfoAccessor& accessor, + ScopedArenaAllocator* local_allocator); // Creates basic blocks in `graph_` at branch target dex_pc positions of the // `code_item_`. Blocks are connected but left unpopulated with instructions. @@ -83,7 +70,7 @@ class HBasicBlockBuilder : public ValueObject { HGraph* const graph_; const DexFile* const dex_file_; - const DexFile::CodeItem* const code_item_; // null for intrinsic graph. + CodeItemDataAccessor code_item_accessor_; // null code item for intrinsic graph. 
ScopedArenaAllocator* const local_allocator_; ScopedArenaVector<HBasicBlock*> branch_targets_; diff --git a/compiler/optimizing/builder.cc b/compiler/optimizing/builder.cc index d73ef1f3a1..af537dd653 100644 --- a/compiler/optimizing/builder.cc +++ b/compiler/optimizing/builder.cc @@ -37,7 +37,7 @@ namespace art { HGraphBuilder::HGraphBuilder(HGraph* graph, - const DexFile::CodeItem* code_item, + const CodeItemDebugInfoAccessor& accessor, const DexCompilationUnit* dex_compilation_unit, const DexCompilationUnit* outer_compilation_unit, CompilerDriver* driver, @@ -47,7 +47,7 @@ HGraphBuilder::HGraphBuilder(HGraph* graph, VariableSizedHandleScope* handles) : graph_(graph), dex_file_(&graph->GetDexFile()), - code_item_(code_item), + code_item_accessor_(accessor), dex_compilation_unit_(dex_compilation_unit), outer_compilation_unit_(outer_compilation_unit), compiler_driver_(driver), @@ -57,6 +57,23 @@ HGraphBuilder::HGraphBuilder(HGraph* graph, handles_(handles), return_type_(DataType::FromShorty(dex_compilation_unit_->GetShorty()[0])) {} +HGraphBuilder::HGraphBuilder(HGraph* graph, + const DexCompilationUnit* dex_compilation_unit, + const CodeItemDebugInfoAccessor& accessor, + VariableSizedHandleScope* handles, + DataType::Type return_type) + : graph_(graph), + dex_file_(&graph->GetDexFile()), + code_item_accessor_(accessor), + dex_compilation_unit_(dex_compilation_unit), + outer_compilation_unit_(nullptr), + compiler_driver_(nullptr), + code_generator_(nullptr), + compilation_stats_(nullptr), + interpreter_metadata_(nullptr), + handles_(handles), + return_type_(return_type) {} + bool HGraphBuilder::SkipCompilation(size_t number_of_branches) { if (compiler_driver_ == nullptr) { // Note that the compiler driver is null when unit testing. @@ -69,20 +86,20 @@ bool HGraphBuilder::SkipCompilation(size_t number_of_branches) { return false; } - if (compiler_options.IsHugeMethod(code_item_->insns_size_in_code_units_)) { + const uint32_t code_units = code_item_accessor_.InsnsSizeInCodeUnits(); + if (compiler_options.IsHugeMethod(code_units)) { VLOG(compiler) << "Skip compilation of huge method " << dex_file_->PrettyMethod(dex_compilation_unit_->GetDexMethodIndex()) - << ": " << code_item_->insns_size_in_code_units_ << " code units"; + << ": " << code_units << " code units"; MaybeRecordStat(compilation_stats_, MethodCompilationStat::kNotCompiledHugeMethod); return true; } // If it's large and contains no branches, it's likely to be machine generated initialization. 
- if (compiler_options.IsLargeMethod(code_item_->insns_size_in_code_units_) - && (number_of_branches == 0)) { + if (compiler_options.IsLargeMethod(code_units) && (number_of_branches == 0)) { VLOG(compiler) << "Skip compilation of large method with no branch " << dex_file_->PrettyMethod(dex_compilation_unit_->GetDexMethodIndex()) - << ": " << code_item_->insns_size_in_code_units_ << " code units"; + << ": " << code_units << " code units"; MaybeRecordStat(compilation_stats_, MethodCompilationStat::kNotCompiledLargeMethodNoBranches); return true; } @@ -91,17 +108,17 @@ bool HGraphBuilder::SkipCompilation(size_t number_of_branches) { } GraphAnalysisResult HGraphBuilder::BuildGraph() { - DCHECK(code_item_ != nullptr); + DCHECK(code_item_accessor_.HasCodeItem()); DCHECK(graph_->GetBlocks().empty()); - graph_->SetNumberOfVRegs(code_item_->registers_size_); - graph_->SetNumberOfInVRegs(code_item_->ins_size_); - graph_->SetMaximumNumberOfOutVRegs(code_item_->outs_size_); - graph_->SetHasTryCatch(code_item_->tries_size_ != 0); + graph_->SetNumberOfVRegs(code_item_accessor_.RegistersSize()); + graph_->SetNumberOfInVRegs(code_item_accessor_.InsSize()); + graph_->SetMaximumNumberOfOutVRegs(code_item_accessor_.OutsSize()); + graph_->SetHasTryCatch(code_item_accessor_.TriesSize() != 0); // Use ScopedArenaAllocator for all local allocations. ScopedArenaAllocator local_allocator(graph_->GetArenaStack()); - HBasicBlockBuilder block_builder(graph_, dex_file_, code_item_, &local_allocator); + HBasicBlockBuilder block_builder(graph_, dex_file_, code_item_accessor_, &local_allocator); SsaBuilder ssa_builder(graph_, dex_compilation_unit_->GetClassLoader(), dex_compilation_unit_->GetDexCache(), @@ -111,7 +128,7 @@ GraphAnalysisResult HGraphBuilder::BuildGraph() { &block_builder, &ssa_builder, dex_file_, - code_item_, + code_item_accessor_, return_type_, dex_compilation_unit_, outer_compilation_unit_, @@ -150,7 +167,7 @@ GraphAnalysisResult HGraphBuilder::BuildGraph() { } void HGraphBuilder::BuildIntrinsicGraph(ArtMethod* method) { - DCHECK(code_item_ == nullptr); + DCHECK(!code_item_accessor_.HasCodeItem()); DCHECK(graph_->GetBlocks().empty()); // Determine the number of arguments and associated vregs. @@ -170,7 +187,10 @@ void HGraphBuilder::BuildIntrinsicGraph(ArtMethod* method) { // Use ScopedArenaAllocator for all local allocations. 
ScopedArenaAllocator local_allocator(graph_->GetArenaStack()); - HBasicBlockBuilder block_builder(graph_, dex_file_, /* code_item */ nullptr, &local_allocator); + HBasicBlockBuilder block_builder(graph_, + dex_file_, + CodeItemDebugInfoAccessor(), + &local_allocator); SsaBuilder ssa_builder(graph_, dex_compilation_unit_->GetClassLoader(), dex_compilation_unit_->GetDexCache(), @@ -180,7 +200,7 @@ void HGraphBuilder::BuildIntrinsicGraph(ArtMethod* method) { &block_builder, &ssa_builder, dex_file_, - /* code_item */ nullptr, + CodeItemDebugInfoAccessor(), return_type_, dex_compilation_unit_, outer_compilation_unit_, diff --git a/compiler/optimizing/builder.h b/compiler/optimizing/builder.h index 0bb3a051f7..c40e0b4e6a 100644 --- a/compiler/optimizing/builder.h +++ b/compiler/optimizing/builder.h @@ -18,6 +18,7 @@ #define ART_COMPILER_OPTIMIZING_BUILDER_H_ #include "base/arena_object.h" +#include "code_item_accessors.h" #include "dex_file-inl.h" #include "dex_file.h" #include "driver/compiler_driver.h" @@ -33,7 +34,7 @@ class OptimizingCompilerStats; class HGraphBuilder : public ValueObject { public: HGraphBuilder(HGraph* graph, - const DexFile::CodeItem* code_item, + const CodeItemDebugInfoAccessor& accessor, const DexCompilationUnit* dex_compilation_unit, const DexCompilationUnit* outer_compilation_unit, CompilerDriver* driver, @@ -45,20 +46,9 @@ class HGraphBuilder : public ValueObject { // Only for unit testing. HGraphBuilder(HGraph* graph, const DexCompilationUnit* dex_compilation_unit, - const DexFile::CodeItem& code_item, + const CodeItemDebugInfoAccessor& accessor, VariableSizedHandleScope* handles, - DataType::Type return_type = DataType::Type::kInt32) - : graph_(graph), - dex_file_(&graph->GetDexFile()), - code_item_(&code_item), - dex_compilation_unit_(dex_compilation_unit), - outer_compilation_unit_(nullptr), - compiler_driver_(nullptr), - code_generator_(nullptr), - compilation_stats_(nullptr), - interpreter_metadata_(nullptr), - handles_(handles), - return_type_(return_type) {} + DataType::Type return_type = DataType::Type::kInt32); GraphAnalysisResult BuildGraph(); void BuildIntrinsicGraph(ArtMethod* method); @@ -70,7 +60,7 @@ class HGraphBuilder : public ValueObject { HGraph* const graph_; const DexFile* const dex_file_; - const DexFile::CodeItem* const code_item_; // null for intrinsic graph. + const CodeItemDebugInfoAccessor code_item_accessor_; // null for intrinsic graph. // The compilation unit of the current method being compiled. Note that // it can be an inlined method. 
diff --git a/compiler/optimizing/inliner.cc b/compiler/optimizing/inliner.cc index 8750910fe1..7a66d807cf 100644 --- a/compiler/optimizing/inliner.cc +++ b/compiler/optimizing/inliner.cc @@ -1381,26 +1381,26 @@ bool HInliner::TryBuildAndInline(HInvoke* invoke_instruction, bool same_dex_file = IsSameDexFile(*outer_compilation_unit_.GetDexFile(), *method->GetDexFile()); - const DexFile::CodeItem* code_item = method->GetCodeItem(); + CodeItemDataAccessor accessor(method); - if (code_item == nullptr) { + if (!accessor.HasCodeItem()) { LOG_FAIL_NO_STAT() << "Method " << method->PrettyMethod() << " is not inlined because it is native"; return false; } size_t inline_max_code_units = compiler_driver_->GetCompilerOptions().GetInlineMaxCodeUnits(); - if (code_item->insns_size_in_code_units_ > inline_max_code_units) { + if (accessor.InsnsSizeInCodeUnits() > inline_max_code_units) { LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedCodeItem) << "Method " << method->PrettyMethod() << " is not inlined because its code item is too big: " - << code_item->insns_size_in_code_units_ + << accessor.InsnsSizeInCodeUnits() << " > " << inline_max_code_units; return false; } - if (code_item->tries_size_ != 0) { + if (accessor.TriesSize() != 0) { LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedTryCatch) << "Method " << method->PrettyMethod() << " is not inlined because of try block"; return false; @@ -1660,6 +1660,7 @@ bool HInliner::TryBuildAndInlineHelper(HInvoke* invoke_instruction, const DexFile::CodeItem* code_item = resolved_method->GetCodeItem(); const DexFile& callee_dex_file = *resolved_method->GetDexFile(); uint32_t method_index = resolved_method->GetDexMethodIndex(); + CodeItemDebugInfoAccessor code_item_accessor(&callee_dex_file, code_item); ClassLinker* class_linker = caller_compilation_unit_.GetClassLinker(); Handle<mirror::DexCache> dex_cache = NewHandleIfDifferent(resolved_method->GetDexCache(), caller_compilation_unit_.GetDexCache(), @@ -1714,7 +1715,7 @@ bool HInliner::TryBuildAndInlineHelper(HInvoke* invoke_instruction, } } HGraphBuilder builder(callee_graph, - code_item, + code_item_accessor, &dex_compilation_unit, &outer_compilation_unit_, compiler_driver_, @@ -1967,6 +1968,7 @@ void HInliner::RunOptimizations(HGraph* callee_graph, return; } + CodeItemDataAccessor accessor(&callee_graph->GetDexFile(), code_item); HInliner inliner(callee_graph, outermost_graph_, codegen_, @@ -1975,7 +1977,7 @@ void HInliner::RunOptimizations(HGraph* callee_graph, compiler_driver_, handles_, inline_stats_, - total_number_of_dex_registers_ + code_item->registers_size_, + total_number_of_dex_registers_ + accessor.RegistersSize(), total_number_of_instructions_ + number_of_instructions, this, depth_ + 1); diff --git a/compiler/optimizing/instruction_builder.cc b/compiler/optimizing/instruction_builder.cc index e36d91fb05..3f5923fa3f 100644 --- a/compiler/optimizing/instruction_builder.cc +++ b/compiler/optimizing/instruction_builder.cc @@ -39,6 +39,44 @@ namespace art { +HInstructionBuilder::HInstructionBuilder(HGraph* graph, + HBasicBlockBuilder* block_builder, + SsaBuilder* ssa_builder, + const DexFile* dex_file, + const CodeItemDebugInfoAccessor& accessor, + DataType::Type return_type, + const DexCompilationUnit* dex_compilation_unit, + const DexCompilationUnit* outer_compilation_unit, + CompilerDriver* compiler_driver, + CodeGenerator* code_generator, + const uint8_t* interpreter_metadata, + OptimizingCompilerStats* compiler_stats, + VariableSizedHandleScope* handles, + ScopedArenaAllocator* local_allocator) 
+ : allocator_(graph->GetAllocator()), + graph_(graph), + handles_(handles), + dex_file_(dex_file), + code_item_accessor_(accessor), + return_type_(return_type), + block_builder_(block_builder), + ssa_builder_(ssa_builder), + compiler_driver_(compiler_driver), + code_generator_(code_generator), + dex_compilation_unit_(dex_compilation_unit), + outer_compilation_unit_(outer_compilation_unit), + quicken_info_(interpreter_metadata), + compilation_stats_(compiler_stats), + local_allocator_(local_allocator), + locals_for_(local_allocator->Adapter(kArenaAllocGraphBuilder)), + current_block_(nullptr), + current_locals_(nullptr), + latest_result_(nullptr), + current_this_parameter_(nullptr), + loop_headers_(local_allocator->Adapter(kArenaAllocGraphBuilder)) { + loop_headers_.reserve(kDefaultNumberOfLoops); +} + HBasicBlock* HInstructionBuilder::FindBlockStartingAt(uint32_t dex_pc) const { return block_builder_->GetBlockAt(dex_pc); } @@ -273,7 +311,7 @@ static bool IsBlockPopulated(HBasicBlock* block) { } bool HInstructionBuilder::Build() { - DCHECK(code_item_ != nullptr); + DCHECK(code_item_accessor_.HasCodeItem()); locals_for_.resize( graph_->GetBlocks().size(), ScopedArenaVector<HInstruction*>(local_allocator_->Adapter(kArenaAllocGraphBuilder))); @@ -323,7 +361,7 @@ bool HInstructionBuilder::Build() { quicken_index = block_builder_->GetQuickenIndex(block_dex_pc); } - for (const DexInstructionPcPair& pair : code_item_->Instructions(block_dex_pc)) { + for (const DexInstructionPcPair& pair : code_item_accessor_.InstructionsFrom(block_dex_pc)) { if (current_block_ == nullptr) { // The previous instruction ended this block. break; @@ -367,7 +405,7 @@ bool HInstructionBuilder::Build() { } void HInstructionBuilder::BuildIntrinsic(ArtMethod* method) { - DCHECK(code_item_ == nullptr); + DCHECK(!code_item_accessor_.HasCodeItem()); DCHECK(method->IsIntrinsic()); locals_for_.resize( @@ -442,15 +480,16 @@ ArenaBitVector* HInstructionBuilder::FindNativeDebugInfoLocations() { return false; } }; - CodeItemDebugInfoAccessor accessor(dex_file_, code_item_); ArenaBitVector* locations = ArenaBitVector::Create(local_allocator_, - accessor.InsnsSizeInCodeUnits(), + code_item_accessor_.InsnsSizeInCodeUnits(), /* expandable */ false, kArenaAllocGraphBuilder); locations->ClearAllBits(); - dex_file_->DecodeDebugPositionInfo(accessor.DebugInfoOffset(), Callback::Position, locations); + dex_file_->DecodeDebugPositionInfo(code_item_accessor_.DebugInfoOffset(), + Callback::Position, + locations); // Instruction-specific tweaks. - for (const DexInstructionPcPair& inst : accessor) { + for (const DexInstructionPcPair& inst : code_item_accessor_) { switch (inst->Opcode()) { case Instruction::MOVE_EXCEPTION: { // Stop in native debugger after the exception has been moved. 
@@ -459,7 +498,7 @@ ArenaBitVector* HInstructionBuilder::FindNativeDebugInfoLocations() { locations->ClearBit(inst.DexPc()); DexInstructionIterator next = std::next(DexInstructionIterator(inst)); DCHECK(next.DexPc() != inst.DexPc()); - if (next != accessor.end()) { + if (next != code_item_accessor_.end()) { locations->SetBit(next.DexPc()); } break; @@ -1706,7 +1745,8 @@ void HInstructionBuilder::BuildFillArrayData(const Instruction& instruction, uin int32_t payload_offset = instruction.VRegB_31t() + dex_pc; const Instruction::ArrayDataPayload* payload = - reinterpret_cast<const Instruction::ArrayDataPayload*>(code_item_->insns_ + payload_offset); + reinterpret_cast<const Instruction::ArrayDataPayload*>( + code_item_accessor_.Insns() + payload_offset); const uint8_t* data = payload->data; uint32_t element_count = payload->element_count; diff --git a/compiler/optimizing/instruction_builder.h b/compiler/optimizing/instruction_builder.h index 0500d40cd3..b4e30516ab 100644 --- a/compiler/optimizing/instruction_builder.h +++ b/compiler/optimizing/instruction_builder.h @@ -19,6 +19,7 @@ #include "base/scoped_arena_allocator.h" #include "base/scoped_arena_containers.h" +#include "code_item_accessors.h" #include "data_type.h" #include "dex_file.h" #include "dex_file_types.h" @@ -50,7 +51,7 @@ class HInstructionBuilder : public ValueObject { HBasicBlockBuilder* block_builder, SsaBuilder* ssa_builder, const DexFile* dex_file, - const DexFile::CodeItem* code_item, + const CodeItemDebugInfoAccessor& accessor, DataType::Type return_type, const DexCompilationUnit* dex_compilation_unit, const DexCompilationUnit* outer_compilation_unit, @@ -59,30 +60,7 @@ class HInstructionBuilder : public ValueObject { const uint8_t* interpreter_metadata, OptimizingCompilerStats* compiler_stats, VariableSizedHandleScope* handles, - ScopedArenaAllocator* local_allocator) - : allocator_(graph->GetAllocator()), - graph_(graph), - handles_(handles), - dex_file_(dex_file), - code_item_(code_item), - return_type_(return_type), - block_builder_(block_builder), - ssa_builder_(ssa_builder), - compiler_driver_(compiler_driver), - code_generator_(code_generator), - dex_compilation_unit_(dex_compilation_unit), - outer_compilation_unit_(outer_compilation_unit), - quicken_info_(interpreter_metadata), - compilation_stats_(compiler_stats), - local_allocator_(local_allocator), - locals_for_(local_allocator->Adapter(kArenaAllocGraphBuilder)), - current_block_(nullptr), - current_locals_(nullptr), - latest_result_(nullptr), - current_this_parameter_(nullptr), - loop_headers_(local_allocator->Adapter(kArenaAllocGraphBuilder)) { - loop_headers_.reserve(kDefaultNumberOfLoops); - } + ScopedArenaAllocator* local_allocator); bool Build(); void BuildIntrinsic(ArtMethod* method); @@ -329,7 +307,7 @@ class HInstructionBuilder : public ValueObject { // The dex file where the method being compiled is, and the bytecode data. const DexFile* const dex_file_; - const DexFile::CodeItem* const code_item_; // null for intrinsic graph. + const CodeItemDebugInfoAccessor code_item_accessor_; // null for intrinsic graph. // The return type of the method being compiled. 
const DataType::Type return_type_; diff --git a/compiler/optimizing/optimization.cc b/compiler/optimizing/optimization.cc index 7149d93d07..d8ac696d1e 100644 --- a/compiler/optimizing/optimization.cc +++ b/compiler/optimizing/optimization.cc @@ -35,6 +35,7 @@ #include "bounds_check_elimination.h" #include "cha_guard_optimization.h" +#include "code_item_accessors-inl.h" #include "code_sinking.h" #include "constant_folding.h" #include "constructor_fence_redundancy_elimination.h" @@ -241,7 +242,8 @@ ArenaVector<HOptimization*> ConstructOptimizations( opt = new (allocator) HDeadCodeElimination(graph, stats, name); break; case OptimizationPass::kInliner: { - size_t number_of_dex_registers = dex_compilation_unit.GetCodeItem()->registers_size_; + CodeItemDataAccessor accessor(dex_compilation_unit.GetDexFile(), + dex_compilation_unit.GetCodeItem()); opt = new (allocator) HInliner(graph, // outer_graph graph, // outermost_graph codegen, @@ -250,7 +252,7 @@ ArenaVector<HOptimization*> ConstructOptimizations( driver, handles, stats, - number_of_dex_registers, + accessor.RegistersSize(), /* total_number_of_instructions */ 0, /* parent */ nullptr, /* depth */ 0, diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc index 24b1a123ee..9d04dd8343 100644 --- a/compiler/optimizing/optimizing_compiler.cc +++ b/compiler/optimizing/optimizing_compiler.cc @@ -766,11 +766,13 @@ CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* allocator, static constexpr size_t kSpaceFilterOptimizingThreshold = 128; const CompilerOptions& compiler_options = compiler_driver->GetCompilerOptions(); if ((compiler_options.GetCompilerFilter() == CompilerFilter::kSpace) - && (code_item->insns_size_in_code_units_ > kSpaceFilterOptimizingThreshold)) { + && (CodeItemInstructionAccessor(&dex_file, code_item).InsnsSizeInCodeUnits() > + kSpaceFilterOptimizingThreshold)) { MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kNotCompiledSpaceFilter); return nullptr; } + CodeItemDebugInfoAccessor code_item_accessor(&dex_file, code_item); HGraph* graph = new (allocator) HGraph( allocator, arena_stack, @@ -814,7 +816,7 @@ CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* allocator, VLOG(compiler) << "Building " << pass_observer.GetMethodName(); PassScope scope(HGraphBuilder::kBuilderPassName, &pass_observer); HGraphBuilder builder(graph, - code_item, + code_item_accessor, &dex_compilation_unit, &dex_compilation_unit, compiler_driver, @@ -932,7 +934,7 @@ CodeGenerator* OptimizingCompiler::TryCompileIntrinsic( VLOG(compiler) << "Building intrinsic graph " << pass_observer.GetMethodName(); PassScope scope(HGraphBuilder::kBuilderPassName, &pass_observer); HGraphBuilder builder(graph, - /* code_item */ nullptr, + CodeItemDebugInfoAccessor(), // Null code item. 
&dex_compilation_unit, &dex_compilation_unit, compiler_driver, diff --git a/compiler/optimizing/optimizing_unit_test.h b/compiler/optimizing/optimizing_unit_test.h index 158c252f45..7d05262b10 100644 --- a/compiler/optimizing/optimizing_unit_test.h +++ b/compiler/optimizing/optimizing_unit_test.h @@ -19,6 +19,7 @@ #include "base/scoped_arena_allocator.h" #include "builder.h" +#include "code_item_accessors-inl.h" #include "common_compiler_test.h" #include "dex_file.h" #include "dex_instruction.h" @@ -145,7 +146,8 @@ class OptimizingUnitTest : public CommonCompilerTest { /* access_flags */ 0u, /* verified_method */ nullptr, handles_->NewHandle<mirror::DexCache>(nullptr)); - HGraphBuilder builder(graph, dex_compilation_unit, *code_item, handles_.get(), return_type); + CodeItemDebugInfoAccessor accessor(&graph->GetDexFile(), code_item); + HGraphBuilder builder(graph, dex_compilation_unit, accessor, handles_.get(), return_type); bool graph_built = (builder.BuildGraph() == kAnalysisSuccess); return graph_built ? graph : nullptr; } |