diff options
Diffstat (limited to 'compiler/optimizing')
-rw-r--r-- | compiler/optimizing/code_generator.cc | 26 | ||||
-rw-r--r-- | compiler/optimizing/code_generator.h | 25 | ||||
-rw-r--r-- | compiler/optimizing/code_generator_arm.cc | 3 | ||||
-rw-r--r-- | compiler/optimizing/code_generator_arm64.cc | 3 | ||||
-rw-r--r-- | compiler/optimizing/code_generator_mips.cc | 5 | ||||
-rw-r--r-- | compiler/optimizing/code_generator_x86.cc | 3 | ||||
-rw-r--r-- | compiler/optimizing/code_generator_x86_64.cc | 37 | ||||
-rw-r--r-- | compiler/optimizing/code_generator_x86_64.h | 6 | ||||
-rw-r--r-- | compiler/optimizing/nodes.h | 11 | ||||
-rw-r--r-- | compiler/optimizing/optimizing_compiler.cc | 30 | ||||
-rw-r--r-- | compiler/optimizing/sharpening.cc | 16 |
11 files changed, 152 insertions, 13 deletions
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc index 8b450e11dc..a5f248dd20 100644 --- a/compiler/optimizing/code_generator.cc +++ b/compiler/optimizing/code_generator.cc @@ -1375,4 +1375,30 @@ uint32_t CodeGenerator::GetReferenceDisableFlagOffset() const { return klass->GetDisableIntrinsicFlagOffset().Uint32Value(); } +void CodeGenerator::EmitJitRoots(uint8_t* code, + Handle<mirror::ObjectArray<mirror::Object>> roots, + const uint8_t* roots_data, + Handle<mirror::DexCache> outer_dex_cache) { + DCHECK_EQ(static_cast<size_t>(roots->GetLength()), GetNumberOfJitRoots()); + StackHandleScope<1> hs(Thread::Current()); + MutableHandle<mirror::DexCache> h_dex_cache(hs.NewHandle<mirror::DexCache>(nullptr)); + ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); + size_t index = 0; + for (auto& entry : jit_string_roots_) { + const DexFile& entry_dex_file = *entry.first.dex_file; + // Avoid the expensive FindDexCache call by checking if the string is + // in the compiled method's dex file. + h_dex_cache.Assign(IsSameDexFile(*outer_dex_cache->GetDexFile(), entry_dex_file) + ? 
outer_dex_cache.Get() + : class_linker->FindDexCache(hs.Self(), entry_dex_file)); + mirror::String* string = class_linker->LookupString( + entry_dex_file, entry.first.string_index, h_dex_cache); + DCHECK(string != nullptr) << "JIT roots require strings to have been loaded"; + roots->Set(index, string); + entry.second = index; + ++index; + } + EmitJitRootPatches(code, roots_data); + } + } // namespace art diff --git a/compiler/optimizing/code_generator.h b/compiler/optimizing/code_generator.h index a81f24e3d8..212d5711f7 100644 --- a/compiler/optimizing/code_generator.h +++ b/compiler/optimizing/code_generator.h @@ -31,6 +31,7 @@ #include "nodes.h" #include "optimizing_compiler_stats.h" #include "stack_map_stream.h" +#include "string_reference.h" #include "utils/label.h" namespace art { @@ -331,6 +332,17 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> { void BuildStackMaps(MemoryRegion region, const DexFile::CodeItem& code_item); size_t ComputeStackMapsSize(); + size_t GetNumberOfJitRoots() const { + return jit_string_roots_.size(); + } + + // Fills the `literals` array with literals collected during code generation. + // Also emits literal patches. + void EmitJitRoots(uint8_t* code, + Handle<mirror::ObjectArray<mirror::Object>> roots, + const uint8_t* roots_data, + Handle<mirror::DexCache> outer_dex_cache) + REQUIRES_SHARED(Locks::mutator_lock_); bool IsLeafMethod() const { return is_leaf_; @@ -567,6 +579,8 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> { fpu_callee_save_mask_(fpu_callee_save_mask), stack_map_stream_(graph->GetArena()), block_order_(nullptr), + jit_string_roots_(StringReferenceValueComparator(), + graph->GetArena()->Adapter(kArenaAllocCodeGenerator)), disasm_info_(nullptr), stats_(stats), graph_(graph), @@ -633,6 +647,12 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> { return current_slow_path_; } + // Emit the patches associated with JIT roots. 
Only applies to JIT compiled code. + virtual void EmitJitRootPatches(uint8_t* code ATTRIBUTE_UNUSED, + const uint8_t* roots_data ATTRIBUTE_UNUSED) { + DCHECK_EQ(jit_string_roots_.size(), 0u); + } + // Frame size required for this method. uint32_t frame_size_; uint32_t core_spill_mask_; @@ -658,6 +678,11 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> { // The order to use for code generation. const ArenaVector<HBasicBlock*>* block_order_; + // Maps a StringReference (dex_file, string_index) to the index in the literal table. + // Entries are initially added with a 0 index, and `EmitJitRoots` will compute all the + // indices. + ArenaSafeMap<StringReference, size_t, StringReferenceValueComparator> jit_string_roots_; + DisassemblyInformation* disasm_info_; private: diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc index 08d22f84d0..f9ef96ceb5 100644 --- a/compiler/optimizing/code_generator_arm.cc +++ b/compiler/optimizing/code_generator_arm.cc @@ -5889,6 +5889,9 @@ HLoadString::LoadKind CodeGeneratorARM::GetSupportedLoadStringKind( case HLoadString::LoadKind::kBssEntry: DCHECK(!Runtime::Current()->UseJitCompilation()); break; + case HLoadString::LoadKind::kJitTableAddress: + DCHECK(Runtime::Current()->UseJitCompilation()); + return HLoadString::LoadKind::kDexCacheViaMethod; case HLoadString::LoadKind::kDexCacheViaMethod: break; } diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc index f5119df4e9..4f7cc618e6 100644 --- a/compiler/optimizing/code_generator_arm64.cc +++ b/compiler/optimizing/code_generator_arm64.cc @@ -4386,6 +4386,9 @@ HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind( break; case HLoadString::LoadKind::kDexCacheViaMethod: break; + case HLoadString::LoadKind::kJitTableAddress: + DCHECK(Runtime::Current()->UseJitCompilation()); + return HLoadString::LoadKind::kDexCacheViaMethod; } return 
desired_string_load_kind; } diff --git a/compiler/optimizing/code_generator_mips.cc b/compiler/optimizing/code_generator_mips.cc index 6e9fbd2560..0960c54408 100644 --- a/compiler/optimizing/code_generator_mips.cc +++ b/compiler/optimizing/code_generator_mips.cc @@ -5209,6 +5209,11 @@ HLoadString::LoadKind CodeGeneratorMIPS::GetSupportedLoadStringKind( case HLoadString::LoadKind::kDexCacheViaMethod: fallback_load = false; break; + case HLoadString::LoadKind::kJitTableAddress: + DCHECK(Runtime::Current()->UseJitCompilation()); + // TODO: implement. + fallback_load = true; + break; } if (fallback_load) { desired_string_load_kind = HLoadString::LoadKind::kDexCacheViaMethod; diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc index c90227930c..a2596379d7 100644 --- a/compiler/optimizing/code_generator_x86.cc +++ b/compiler/optimizing/code_generator_x86.cc @@ -6223,6 +6223,9 @@ HLoadString::LoadKind CodeGeneratorX86::GetSupportedLoadStringKind( break; case HLoadString::LoadKind::kDexCacheViaMethod: break; + case HLoadString::LoadKind::kJitTableAddress: + DCHECK(Runtime::Current()->UseJitCompilation()); + return HLoadString::LoadKind::kDexCacheViaMethod; } return desired_string_load_kind; } diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc index 89b16d3f77..a1d22f8c93 100644 --- a/compiler/optimizing/code_generator_x86_64.cc +++ b/compiler/optimizing/code_generator_x86_64.cc @@ -1266,7 +1266,8 @@ CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph, simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)), string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)), type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)), - fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) { + fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)), + 
jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) { AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister)); } @@ -5635,6 +5636,9 @@ HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind( break; case HLoadString::LoadKind::kDexCacheViaMethod: break; + case HLoadString::LoadKind::kJitTableAddress: + DCHECK(Runtime::Current()->UseJitCompilation()); + break; } return desired_string_load_kind; } @@ -5664,6 +5668,14 @@ void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) { } } +Label* CodeGeneratorX86_64::NewJitRootStringPatch(const DexFile& dex_file, uint32_t dex_index) { + jit_string_roots_.Overwrite(StringReference(&dex_file, dex_index), /* placeholder */ 0u); + // Add a patch entry and return the label. + jit_string_patches_.emplace_back(dex_file, dex_index); + PatchInfo<Label>* info = &jit_string_patches_.back(); + return &info->label; +} + void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) { LocationSummary* locations = load->GetLocations(); Location out_loc = locations->Out(); @@ -5695,6 +5707,15 @@ void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) { __ Bind(slow_path->GetExitLabel()); return; } + case HLoadString::LoadKind::kJitTableAddress: { + Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, + /* no_rip */ true); + Label* fixup_label = + codegen_->NewJitRootStringPatch(load->GetDexFile(), load->GetStringIndex()); + // /* GcRoot<mirror::String> */ out = *address + GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kEmitCompilerReadBarrier); + return; + } default: break; } @@ -7080,6 +7101,20 @@ void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low, } } +void CodeGeneratorX86_64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) { + for (const PatchInfo<Label>& info : jit_string_patches_) { + const auto& it = jit_string_roots_.find(StringReference(&info.dex_file, info.index)); + DCHECK(it != 
jit_string_roots_.end()); + size_t index_in_table = it->second; + uint32_t code_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment; + uintptr_t address = + reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>); + typedef __attribute__((__aligned__(1))) uint32_t unaligned_uint32_t; + reinterpret_cast<unaligned_uint32_t*>(code + code_offset)[0] = + dchecked_integral_cast<uint32_t>(address); + } +} + #undef __ } // namespace x86_64 diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h index 5a6dc54e7a..bc78b8cee6 100644 --- a/compiler/optimizing/code_generator_x86_64.h +++ b/compiler/optimizing/code_generator_x86_64.h @@ -411,11 +411,14 @@ class CodeGeneratorX86_64 : public CodeGenerator { void RecordTypePatch(HLoadClass* load_class); Label* NewStringBssEntryPatch(HLoadString* load_string); Label* NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file, uint32_t element_offset); + Label* NewJitRootStringPatch(const DexFile& dex_file, uint32_t dex_index); void MoveFromReturnRegister(Location trg, Primitive::Type type) OVERRIDE; void EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) OVERRIDE; + void EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) OVERRIDE; + const X86_64InstructionSetFeatures& GetInstructionSetFeatures() const { return isa_features_; } @@ -601,6 +604,9 @@ class CodeGeneratorX86_64 : public CodeGenerator { // Fixups for jump tables need to be handled specially. ArenaVector<JumpTableRIPFixup*> fixups_to_jump_tables_; + // Patches for string literals in JIT compiled code. 
+ ArenaDeque<PatchInfo<Label>> jit_string_patches_; + DISALLOW_COPY_AND_ASSIGN(CodeGeneratorX86_64); }; diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h index ce2edde1c1..a946e319c7 100644 --- a/compiler/optimizing/nodes.h +++ b/compiler/optimizing/nodes.h @@ -5692,7 +5692,10 @@ class HLoadString FINAL : public HInstruction { // all other types are unavailable. kDexCacheViaMethod, - kLast = kDexCacheViaMethod + // Load from the root table associated with the JIT compiled method. + kJitTableAddress, + + kLast = kJitTableAddress, }; HLoadString(HCurrentMethod* current_method, @@ -5750,7 +5753,8 @@ class HLoadString FINAL : public HInstruction { LoadKind load_kind = GetLoadKind(); if (load_kind == LoadKind::kBootImageLinkTimeAddress || load_kind == LoadKind::kBootImageLinkTimePcRelative || - load_kind == LoadKind::kBootImageAddress) { + load_kind == LoadKind::kBootImageAddress || + load_kind == LoadKind::kJitTableAddress) { return false; } return !IsInDexCache(); @@ -5803,7 +5807,8 @@ class HLoadString FINAL : public HInstruction { return load_kind == LoadKind::kBootImageLinkTimeAddress || load_kind == LoadKind::kBootImageLinkTimePcRelative || load_kind == LoadKind::kBssEntry || - load_kind == LoadKind::kDexCacheViaMethod; + load_kind == LoadKind::kDexCacheViaMethod || + load_kind == LoadKind::kJitTableAddress; } static bool HasAddress(LoadKind load_kind) { diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc index 6f84cdcc4f..1add660f1b 100644 --- a/compiler/optimizing/optimizing_compiler.cc +++ b/compiler/optimizing/optimizing_compiler.cc @@ -117,6 +117,7 @@ class CodeVectorAllocator FINAL : public CodeAllocator { size_t GetSize() const { return size_; } const ArenaVector<uint8_t>& GetMemory() const { return memory_; } + uint8_t* GetData() { return memory_.data(); } private: ArenaVector<uint8_t> memory_; @@ -1126,7 +1127,7 @@ bool OptimizingCompiler::JitCompile(Thread* self, jit::JitCodeCache* 
code_cache, ArtMethod* method, bool osr) { - StackHandleScope<2> hs(self); + StackHandleScope<3> hs(self); Handle<mirror::ClassLoader> class_loader(hs.NewHandle( method->GetDeclaringClass()->GetClassLoader())); Handle<mirror::DexCache> dex_cache(hs.NewHandle(method->GetDexCache())); @@ -1172,22 +1173,43 @@ bool OptimizingCompiler::JitCompile(Thread* self, } size_t stack_map_size = codegen->ComputeStackMapsSize(); - uint8_t* stack_map_data = code_cache->ReserveData(self, stack_map_size, method); - if (stack_map_data == nullptr) { + size_t number_of_roots = codegen->GetNumberOfJitRoots(); + ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); + // We allocate an object array to ensure the JIT roots that we will collect in EmitJitRoots + // will be visible by the GC between EmitLiterals and CommitCode. Once CommitCode is + // executed, this array is not needed. + Handle<mirror::ObjectArray<mirror::Object>> roots( + hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc( + self, class_linker->GetClassRoot(ClassLinker::kObjectArrayClass), number_of_roots))); + if (roots.Get() == nullptr) { + // Out of memory, just clear the exception to avoid any Java exception uncaught problems. + DCHECK(self->IsExceptionPending()); + self->ClearException(); + return false; + } + uint8_t* stack_map_data = nullptr; + uint8_t* roots_data = nullptr; + code_cache->ReserveData( + self, stack_map_size, number_of_roots, method, &stack_map_data, &roots_data); + if (stack_map_data == nullptr || roots_data == nullptr) { return false; } MaybeRecordStat(MethodCompilationStat::kCompiled); codegen->BuildStackMaps(MemoryRegion(stack_map_data, stack_map_size), *code_item); + codegen->EmitJitRoots(code_allocator.GetData(), roots, roots_data, dex_cache); + const void* code = code_cache->CommitCode( self, method, stack_map_data, + roots_data, codegen->HasEmptyFrame() ? 
0 : codegen->GetFrameSize(), codegen->GetCoreSpillMask(), codegen->GetFpuSpillMask(), code_allocator.GetMemory().data(), code_allocator.GetSize(), - osr); + osr, + roots); if (code == nullptr) { code_cache->ClearData(self, stack_map_data); diff --git a/compiler/optimizing/sharpening.cc b/compiler/optimizing/sharpening.cc index fd1db592bb..13e449431c 100644 --- a/compiler/optimizing/sharpening.cc +++ b/compiler/optimizing/sharpening.cc @@ -281,7 +281,8 @@ void HSharpening::ProcessLoadString(HLoadString* load_string) { : hs.NewHandle(class_linker->FindDexCache(soa.Self(), dex_file)); if (codegen_->GetCompilerOptions().IsBootImage()) { - // Compiling boot image. Resolve the string and allocate it if needed. + // Compiling boot image. Resolve the string and allocate it if needed, to ensure + // the string will be added to the boot image. DCHECK(!runtime->UseJitCompilation()); mirror::String* string = class_linker->ResolveString(dex_file, string_index, dex_cache); CHECK(string != nullptr); @@ -297,10 +298,14 @@ void HSharpening::ProcessLoadString(HLoadString* load_string) { } else if (runtime->UseJitCompilation()) { // TODO: Make sure we don't set the "compile PIC" flag for JIT as that's bogus. // DCHECK(!codegen_->GetCompilerOptions().GetCompilePic()); - mirror::String* string = dex_cache->GetResolvedString(string_index); - if (string != nullptr && runtime->GetHeap()->ObjectIsInBootImageSpace(string)) { - desired_load_kind = HLoadString::LoadKind::kBootImageAddress; - address = reinterpret_cast64<uint64_t>(string); + mirror::String* string = class_linker->LookupString(dex_file, string_index, dex_cache); + if (string != nullptr) { + if (runtime->GetHeap()->ObjectIsInBootImageSpace(string)) { + desired_load_kind = HLoadString::LoadKind::kBootImageAddress; + address = reinterpret_cast64<uint64_t>(string); + } else { + desired_load_kind = HLoadString::LoadKind::kJitTableAddress; + } } } else { // AOT app compilation. 
Try to lookup the string without allocating if not found. @@ -322,6 +327,7 @@ void HSharpening::ProcessLoadString(HLoadString* load_string) { case HLoadString::LoadKind::kBootImageLinkTimePcRelative: case HLoadString::LoadKind::kBssEntry: case HLoadString::LoadKind::kDexCacheViaMethod: + case HLoadString::LoadKind::kJitTableAddress: load_string->SetLoadKindWithStringReference(load_kind, dex_file, string_index); break; case HLoadString::LoadKind::kBootImageAddress: |