| author | 2018-07-15 23:58:44 +0100 |
|---|---|
| committer | 2018-08-01 14:49:40 +0100 |
| commit | 8cd54547cec8a4537db5682c2da8be22843b1310 (patch) |
| tree | fb1158993bab2e027984cedab59b402c051a45a7 /compiler/optimizing |
| parent | 91f0fdb4372d3f2bcfcd9db67afcbe7ee1901048 (diff) |
Move MethodInfo to CodeInfo.
There is no need to treat it specially any more,
because of the de-duplication at the BitTable level.
This saves 0.6% of oat file size.
Test: test-art-host-gtest
Change-Id: Ife7927d736243879a41d6f325d49ebf6930a63f6
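
For context, the de-duplication the message refers to works at the level of table rows: identical rows are stored once and later writers simply receive the index of the existing entry. The sketch below is illustrative only and does not use ART's BitTableBuilder API; the `DedupTable` class and its `Dedup` method are hypothetical names.

```cpp
// Illustrative sketch of row-level de-duplication (hypothetical DedupTable,
// not ART's BitTableBuilder).
#include <cstddef>
#include <cstdint>
#include <iostream>
#include <unordered_map>
#include <vector>

class DedupTable {
 public:
  // Adds a row and returns its index; identical rows share one stored entry.
  uint32_t Dedup(uint32_t row) {
    auto it = index_of_.find(row);
    if (it != index_of_.end()) {
      return it->second;  // Row already present: reuse its index.
    }
    uint32_t index = static_cast<uint32_t>(rows_.size());
    rows_.push_back(row);
    index_of_.emplace(row, index);
    return index;
  }

  size_t NumRows() const { return rows_.size(); }

 private:
  std::vector<uint32_t> rows_;
  std::unordered_map<uint32_t, uint32_t> index_of_;
};

int main() {
  DedupTable method_indices;
  // Three references to only two distinct dex method indices.
  uint32_t a = method_indices.Dedup(42);
  uint32_t b = method_indices.Dedup(7);
  uint32_t c = method_indices.Dedup(42);  // Collapses onto the first entry.
  std::cout << a << " " << b << " " << c << " rows=" << method_indices.NumRows() << "\n";
  // Prints: 0 1 0 rows=2
  return 0;
}
```

Because duplicate rows already collapse this way, keeping the method indices in a separate MethodInfo section buys nothing extra, which is presumably why folding them into CodeInfo shrinks the oat file slightly.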
Diffstat (limited to 'compiler/optimizing')
| mode | path | lines changed |
|---|---|---|
| -rw-r--r-- | compiler/optimizing/code_generator.cc | 7 |
| -rw-r--r-- | compiler/optimizing/code_generator.h | 3 |
| -rw-r--r-- | compiler/optimizing/optimizing_compiler.cc | 32 |
| -rw-r--r-- | compiler/optimizing/stack_map_stream.cc | 26 |
| -rw-r--r-- | compiler/optimizing/stack_map_stream.h | 13 |
5 files changed, 14 insertions, 67 deletions
```diff
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index 0ebf4bec0a..b0a05da0b1 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -963,13 +963,10 @@ CodeGenerator::CodeGenerator(HGraph* graph,
 
 CodeGenerator::~CodeGenerator() {}
 
-void CodeGenerator::ComputeStackMapAndMethodInfoSize(size_t* stack_map_size,
-                                                     size_t* method_info_size) {
+void CodeGenerator::ComputeStackMapSize(size_t* stack_map_size) {
   DCHECK(stack_map_size != nullptr);
-  DCHECK(method_info_size != nullptr);
   StackMapStream* stack_map_stream = GetStackMapStream();
   *stack_map_size = stack_map_stream->PrepareForFillIn();
-  *method_info_size = stack_map_stream->ComputeMethodInfoSize();
 }
 
 size_t CodeGenerator::GetNumberOfJitRoots() const {
@@ -1039,11 +1036,9 @@ static void CheckLoopEntriesCanBeUsedForOsr(const HGraph& graph,
 }
 
 void CodeGenerator::BuildStackMaps(MemoryRegion stack_map_region,
-                                   MemoryRegion method_info_region,
                                    const DexFile::CodeItem* code_item_for_osr_check) {
   StackMapStream* stack_map_stream = GetStackMapStream();
   stack_map_stream->FillInCodeInfo(stack_map_region);
-  stack_map_stream->FillInMethodInfo(method_info_region);
   if (kIsDebugBuild && code_item_for_osr_check != nullptr) {
     CheckLoopEntriesCanBeUsedForOsr(*graph_, CodeInfo(stack_map_region), *code_item_for_osr_check);
   }
diff --git a/compiler/optimizing/code_generator.h b/compiler/optimizing/code_generator.h
index 59f858ea52..3d58d29648 100644
--- a/compiler/optimizing/code_generator.h
+++ b/compiler/optimizing/code_generator.h
@@ -351,9 +351,8 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
   void AddSlowPath(SlowPathCode* slow_path);
 
   void BuildStackMaps(MemoryRegion stack_map_region,
-                      MemoryRegion method_info_region,
                       const DexFile::CodeItem* code_item_for_osr_check);
-  void ComputeStackMapAndMethodInfoSize(size_t* stack_map_size, size_t* method_info_size);
+  void ComputeStackMapSize(size_t* stack_map_size);
   size_t GetNumberOfJitRoots() const;
 
   // Fills the `literals` array with literals collected during code generation.
```
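
The two diffs above reduce the CodeGenerator interface to a single measure/allocate/fill cycle over one buffer. The stand-alone sketch below mirrors that calling convention with hypothetical stand-ins (`Region`, `TinyStream`); it is not ART's MemoryRegion or StackMapStream, only an illustration of the protocol that ComputeStackMapSize and BuildStackMaps now follow for the one remaining buffer.

```cpp
// Hypothetical stand-ins; not ART types.
#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <iostream>
#include <vector>

// A fixed-size view over caller-owned memory, in the spirit of MemoryRegion.
struct Region {
  uint8_t* data;
  size_t size;
};

// A stream following the same two-phase protocol as StackMapStream:
// first report the encoded size, then fill a caller-provided region.
class TinyStream {
 public:
  void Add(uint8_t byte) { pending_.push_back(byte); }

  size_t PrepareForFillIn() const { return pending_.size(); }

  void FillIn(Region region) {
    // The caller is expected to have allocated exactly the reported size.
    std::memcpy(region.data, pending_.data(), std::min(region.size, pending_.size()));
  }

 private:
  std::vector<uint8_t> pending_;
};

int main() {
  TinyStream stream;
  stream.Add(1);
  stream.Add(2);

  // Measure, allocate, fill: one buffer instead of the former
  // stack-map + method-info pair.
  std::vector<uint8_t> buffer(stream.PrepareForFillIn());
  stream.FillIn(Region{buffer.data(), buffer.size()});
  std::cout << "encoded " << buffer.size() << " bytes\n";
  return 0;
}
```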
```diff
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index 939802626c..d96746fdd7 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -715,18 +715,16 @@ CompiledMethod* OptimizingCompiler::Emit(ArenaAllocator* allocator,
   ArenaVector<uint8_t> method_info(allocator->Adapter(kArenaAllocStackMaps));
   size_t stack_map_size = 0;
   size_t method_info_size = 0;
-  codegen->ComputeStackMapAndMethodInfoSize(&stack_map_size, &method_info_size);
+  codegen->ComputeStackMapSize(&stack_map_size);
   stack_map.resize(stack_map_size);
   method_info.resize(method_info_size);
   codegen->BuildStackMaps(MemoryRegion(stack_map.data(), stack_map.size()),
-                          MemoryRegion(method_info.data(), method_info.size()),
                           code_item_for_osr_check);
 
   CompiledMethod* compiled_method = CompiledMethod::SwapAllocCompiledMethod(
       GetCompilerDriver(),
       codegen->GetInstructionSet(),
       code_allocator->GetMemory(),
-      ArrayRef<const uint8_t>(method_info),
       ArrayRef<const uint8_t>(stack_map),
       ArrayRef<const uint8_t>(*codegen->GetAssembler()->cfi().data()),
       ArrayRef<const linker::LinkerPatch>(linker_patches));
@@ -1101,8 +1099,7 @@ CompiledMethod* OptimizingCompiler::Compile(const DexFile::CodeItem* code_item,
 
 static void CreateJniStackMap(ArenaStack* arena_stack,
                               const JniCompiledMethod& jni_compiled_method,
-                              /* out */ ArenaVector<uint8_t>* stack_map,
-                              /* out */ ArenaVector<uint8_t>* method_info) {
+                              /* out */ ArenaVector<uint8_t>* stack_map) {
   ScopedArenaAllocator allocator(arena_stack);
   StackMapStream stack_map_stream(&allocator, jni_compiled_method.GetInstructionSet());
   stack_map_stream.BeginMethod(
@@ -1112,9 +1109,7 @@ static void CreateJniStackMap(ArenaStack* arena_stack,
       /* num_dex_registers */ 0);
   stack_map_stream.EndMethod();
   stack_map->resize(stack_map_stream.PrepareForFillIn());
-  method_info->resize(stack_map_stream.ComputeMethodInfoSize());
   stack_map_stream.FillInCodeInfo(MemoryRegion(stack_map->data(), stack_map->size()));
-  stack_map_stream.FillInMethodInfo(MemoryRegion(method_info->data(), method_info->size()));
 }
 
 CompiledMethod* OptimizingCompiler::JniCompile(uint32_t access_flags,
@@ -1169,13 +1164,11 @@ CompiledMethod* OptimizingCompiler::JniCompile(uint32_t access_flags,
   MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kCompiledNativeStub);
 
   ArenaVector<uint8_t> stack_map(allocator.Adapter(kArenaAllocStackMaps));
-  ArenaVector<uint8_t> method_info(allocator.Adapter(kArenaAllocStackMaps));
-  CreateJniStackMap(&arena_stack, jni_compiled_method, &stack_map, &method_info);
+  CreateJniStackMap(&arena_stack, jni_compiled_method, &stack_map);
   return CompiledMethod::SwapAllocCompiledMethod(
       GetCompilerDriver(),
       jni_compiled_method.GetInstructionSet(),
       jni_compiled_method.GetCode(),
-      ArrayRef<const uint8_t>(method_info),
       ArrayRef<const uint8_t>(stack_map),
       jni_compiled_method.GetCfi(),
       /* patches */ ArrayRef<const linker::LinkerPatch>());
@@ -1237,34 +1230,28 @@ bool OptimizingCompiler::JitCompile(Thread* self,
     ArenaSet<ArtMethod*, std::less<ArtMethod*>> cha_single_implementation_list(
         allocator.Adapter(kArenaAllocCHA));
     ArenaVector<uint8_t> stack_map(allocator.Adapter(kArenaAllocStackMaps));
-    ArenaVector<uint8_t> method_info(allocator.Adapter(kArenaAllocStackMaps));
     ArenaStack arena_stack(runtime->GetJitArenaPool());
     // StackMapStream is large and it does not fit into this frame, so we need helper method.
    // TODO: Try to avoid the extra memory copy that results from this.
-    CreateJniStackMap(&arena_stack, jni_compiled_method, &stack_map, &method_info);
+    CreateJniStackMap(&arena_stack, jni_compiled_method, &stack_map);
     uint8_t* stack_map_data = nullptr;
-    uint8_t* method_info_data = nullptr;
     uint8_t* roots_data = nullptr;
     uint32_t data_size = code_cache->ReserveData(self,
                                                  stack_map.size(),
-                                                 method_info.size(),
                                                  /* number_of_roots */ 0,
                                                  method,
                                                  &stack_map_data,
-                                                 &method_info_data,
                                                  &roots_data);
     if (stack_map_data == nullptr || roots_data == nullptr) {
       MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kJitOutOfMemoryForCommit);
       return false;
     }
     memcpy(stack_map_data, stack_map.data(), stack_map.size());
-    memcpy(method_info_data, method_info.data(), method_info.size());
 
     const void* code = code_cache->CommitCode(
         self,
         method,
         stack_map_data,
-        method_info_data,
         roots_data,
         jni_compiled_method.GetCode().data(),
         jni_compiled_method.GetCode().size(),
@@ -1340,8 +1327,7 @@ bool OptimizingCompiler::JitCompile(Thread* self,
   }
 
   size_t stack_map_size = 0;
-  size_t method_info_size = 0;
-  codegen->ComputeStackMapAndMethodInfoSize(&stack_map_size, &method_info_size);
+  codegen->ComputeStackMapSize(&stack_map_size);
   size_t number_of_roots = codegen->GetNumberOfJitRoots();
   // We allocate an object array to ensure the JIT roots that we will collect in EmitJitRoots
   // will be visible by the GC between EmitLiterals and CommitCode. Once CommitCode is
@@ -1357,30 +1343,24 @@ bool OptimizingCompiler::JitCompile(Thread* self,
     return false;
   }
   uint8_t* stack_map_data = nullptr;
-  uint8_t* method_info_data = nullptr;
   uint8_t* roots_data = nullptr;
   uint32_t data_size = code_cache->ReserveData(self,
                                                stack_map_size,
-                                               method_info_size,
                                                number_of_roots,
                                                method,
                                                &stack_map_data,
-                                               &method_info_data,
                                                &roots_data);
   if (stack_map_data == nullptr || roots_data == nullptr) {
     MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kJitOutOfMemoryForCommit);
     return false;
   }
-  codegen->BuildStackMaps(MemoryRegion(stack_map_data, stack_map_size),
-                          MemoryRegion(method_info_data, method_info_size),
-                          code_item);
+  codegen->BuildStackMaps(MemoryRegion(stack_map_data, stack_map_size), code_item);
   codegen->EmitJitRoots(code_allocator.GetData(), roots, roots_data);
 
   const void* code = code_cache->CommitCode(
       self,
       method,
       stack_map_data,
-      method_info_data,
       roots_data,
       code_allocator.GetMemory().data(),
      code_allocator.GetMemory().size(),
diff --git a/compiler/optimizing/stack_map_stream.cc b/compiler/optimizing/stack_map_stream.cc
index e1b657554f..429054cec7 100644
--- a/compiler/optimizing/stack_map_stream.cc
+++ b/compiler/optimizing/stack_map_stream.cc
@@ -196,7 +196,7 @@ void StackMapStream::BeginInlineInfoEntry(ArtMethod* method,
       if (encode_art_method) {
         CHECK_EQ(inline_info.GetArtMethod(), method);
       } else {
-        CHECK_EQ(method_infos_[inline_info.GetMethodInfoIndex()][0], method->GetDexMethodIndex());
+        CHECK_EQ(code_info.GetMethodIndexOf(inline_info), method->GetDexMethodIndex());
       }
     });
   }
@@ -274,24 +274,6 @@ void StackMapStream::CreateDexRegisterMap() {
   }
 }
 
-void StackMapStream::FillInMethodInfo(MemoryRegion region) {
-  {
-    MethodInfo info(region.begin(), method_infos_.size());
-    for (size_t i = 0; i < method_infos_.size(); ++i) {
-      info.SetMethodIndex(i, method_infos_[i][0]);
-    }
-  }
-  if (kVerifyStackMaps) {
-    // Check the data matches.
-    MethodInfo info(region.begin());
-    const size_t count = info.NumMethodIndices();
-    DCHECK_EQ(count, method_infos_.size());
-    for (size_t i = 0; i < count; ++i) {
-      DCHECK_EQ(info.GetMethodIndex(i), method_infos_[i][0]);
-    }
-  }
-}
-
 template<typename Writer, typename Builder>
 ALWAYS_INLINE static void EncodeTable(Writer& out, const Builder& bit_table) {
   out.WriteBit(false);  // Is not deduped.
@@ -317,6 +299,7 @@ size_t StackMapStream::PrepareForFillIn() {
   BitMemoryWriter<ScopedArenaVector<uint8_t>> out(&out_, out_.size() * kBitsPerByte);
   EncodeTable(out, stack_maps_);
   EncodeTable(out, inline_infos_);
+  EncodeTable(out, method_infos_);
   EncodeTable(out, register_masks_);
   EncodeTable(out, stack_masks_);
   EncodeTable(out, dex_register_masks_);
@@ -347,9 +330,4 @@ void StackMapStream::FillInCodeInfo(MemoryRegion region) {
   }
 }
 
-size_t StackMapStream::ComputeMethodInfoSize() const {
-  DCHECK_NE(0u, out_.size()) << "PrepareForFillIn not called before " << __FUNCTION__;
-  return MethodInfo::ComputeSize(method_infos_.size());
-}
-
 }  // namespace art
diff --git a/compiler/optimizing/stack_map_stream.h b/compiler/optimizing/stack_map_stream.h
index 203c2cdf84..de79f4921e 100644
--- a/compiler/optimizing/stack_map_stream.h
+++ b/compiler/optimizing/stack_map_stream.h
@@ -25,7 +25,6 @@
 #include "base/scoped_arena_containers.h"
 #include "base/value_object.h"
 #include "dex_register_location.h"
-#include "method_info.h"
 #include "nodes.h"
 #include "stack_map.h"
 
@@ -40,14 +39,14 @@ class StackMapStream : public ValueObject {
   explicit StackMapStream(ScopedArenaAllocator* allocator, InstructionSet instruction_set)
       : instruction_set_(instruction_set),
         stack_maps_(allocator),
+        inline_infos_(allocator),
+        method_infos_(allocator),
         register_masks_(allocator),
         stack_masks_(allocator),
-        inline_infos_(allocator),
         dex_register_masks_(allocator),
         dex_register_maps_(allocator),
         dex_register_catalog_(allocator),
         out_(allocator->Adapter(kArenaAllocStackMapStream)),
-        method_infos_(allocator),
         lazy_stack_masks_(allocator->Adapter(kArenaAllocStackMapStream)),
         current_stack_map_(),
         current_inline_infos_(allocator->Adapter(kArenaAllocStackMapStream)),
@@ -92,9 +91,6 @@ class StackMapStream : public ValueObject {
   // Returns the size (in bytes) needed to store this stream.
   size_t PrepareForFillIn();
   void FillInCodeInfo(MemoryRegion region);
-  void FillInMethodInfo(MemoryRegion region);
-
-  size_t ComputeMethodInfoSize() const;
 
  private:
   static constexpr uint32_t kNoValue = -1;
@@ -107,16 +103,15 @@ class StackMapStream : public ValueObject {
   uint32_t fp_spill_mask_ = 0;
   uint32_t num_dex_registers_ = 0;
   BitTableBuilder<StackMap> stack_maps_;
+  BitTableBuilder<InlineInfo> inline_infos_;
+  BitTableBuilder<MethodInfo> method_infos_;
   BitTableBuilder<RegisterMask> register_masks_;
   BitmapTableBuilder stack_masks_;
-  BitTableBuilder<InlineInfo> inline_infos_;
   BitmapTableBuilder dex_register_masks_;
   BitTableBuilder<MaskInfo> dex_register_maps_;
   BitTableBuilder<DexRegisterInfo> dex_register_catalog_;
   ScopedArenaVector<uint8_t> out_;
 
-  BitTableBuilderBase<1> method_infos_;
-
   ScopedArenaVector<BitVector*> lazy_stack_masks_;
 
   // Variables which track the current state between Begin/End calls;
```
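
Taken together, the stack_map_stream changes mean a method index is now resolved entirely through CodeInfo: the inline-info row carries an index into the method-info table, and both tables live in the same encoded blob. The toy below illustrates only that shape; `ToyCodeInfo` and its unpacked row structs are hypothetical and bear no relation to ART's bit-packed CodeInfo, InlineInfo, or MethodInfo layouts.

```cpp
// Toy illustration of resolving a dex method index through one container
// (hypothetical types, not ART's CodeInfo/InlineInfo/MethodInfo).
#include <cstddef>
#include <cstdint>
#include <iostream>
#include <utility>
#include <vector>

struct InlineInfoRow { uint32_t method_info_index; };  // Points into the method-info table.
struct MethodInfoRow { uint32_t method_index; };       // The dex method index itself.

class ToyCodeInfo {
 public:
  ToyCodeInfo(std::vector<InlineInfoRow> inline_infos, std::vector<MethodInfoRow> method_infos)
      : inline_infos_(std::move(inline_infos)), method_infos_(std::move(method_infos)) {}

  // Same shape as the code_info.GetMethodIndexOf(inline_info) check added in
  // StackMapStream::BeginInlineInfoEntry, but over plain vectors.
  uint32_t GetMethodIndexOf(size_t inline_info_index) const {
    const InlineInfoRow& inline_info = inline_infos_[inline_info_index];
    return method_infos_[inline_info.method_info_index].method_index;
  }

 private:
  std::vector<InlineInfoRow> inline_infos_;
  std::vector<MethodInfoRow> method_infos_;
};

int main() {
  // Three inline infos sharing two de-duplicated method-info rows.
  ToyCodeInfo code_info({{0}, {1}, {0}}, {{42}, {7}});
  std::cout << code_info.GetMethodIndexOf(1) << "\n";  // Prints 7.
  std::cout << code_info.GetMethodIndexOf(2) << "\n";  // Prints 42 (shares row 0).
  return 0;
}
```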