Diffstat (limited to 'compiler')
44 files changed, 1499 insertions, 482 deletions
diff --git a/compiler/Android.mk b/compiler/Android.mk
index 0ed843b918..348eabdde5 100644
--- a/compiler/Android.mk
+++ b/compiler/Android.mk
@@ -58,6 +58,10 @@ LIBART_COMPILER_SRC_FILES := \
   driver/compiler_driver.cc \
   driver/compiler_options.cc \
   driver/dex_compilation_unit.cc \
+  linker/buffered_output_stream.cc \
+  linker/file_output_stream.cc \
+  linker/output_stream.cc \
+  linker/vector_output_stream.cc \
   linker/relative_patcher.cc \
   jit/jit_compiler.cc \
   jni/quick/calling_convention.cc \
@@ -100,16 +104,12 @@ LIBART_COMPILER_SRC_FILES := \
   trampolines/trampoline_compiler.cc \
   utils/assembler.cc \
   utils/swap_space.cc \
-  buffered_output_stream.cc \
   compiler.cc \
   elf_writer.cc \
   elf_writer_debug.cc \
   elf_writer_quick.cc \
-  file_output_stream.cc \
   image_writer.cc \
-  oat_writer.cc \
-  output_stream.cc \
-  vector_output_stream.cc
+  oat_writer.cc

 LIBART_COMPILER_SRC_FILES_arm := \
   dex/quick/arm/assemble_arm.cc \
diff --git a/compiler/dwarf/dwarf_test.h b/compiler/dwarf/dwarf_test.h
index 5464ed9c49..c3a3ca9425 100644
--- a/compiler/dwarf/dwarf_test.h
+++ b/compiler/dwarf/dwarf_test.h
@@ -29,6 +29,7 @@
 #include "common_runtime_test.h"
 #include "elf_builder.h"
 #include "gtest/gtest.h"
+#include "linker/file_output_stream.h"
 #include "os.h"

 namespace art {
diff --git a/compiler/elf_builder.h b/compiler/elf_builder.h
index c19bc3db5c..bb07cc2913 100644
--- a/compiler/elf_builder.h
+++ b/compiler/elf_builder.h
@@ -23,10 +23,9 @@
 #include "base/bit_utils.h"
 #include "base/casts.h"
 #include "base/unix_file/fd_file.h"
-#include "buffered_output_stream.h"
 #include "elf_utils.h"
-#include "file_output_stream.h"
 #include "leb128.h"
+#include "linker/error_delaying_output_stream.h"
 #include "utils/array_ref.h"

 namespace art {
@@ -121,8 +120,8 @@ class ElfBuilder FINAL {
       sections.push_back(this);
       // Align file position.
       if (header_.sh_type != SHT_NOBITS) {
-        header_.sh_offset = RoundUp(owner_->Seek(0, kSeekCurrent), header_.sh_addralign);
-        owner_->Seek(header_.sh_offset, kSeekSet);
+        header_.sh_offset = RoundUp(owner_->stream_.Seek(0, kSeekCurrent), header_.sh_addralign);
+        owner_->stream_.Seek(header_.sh_offset, kSeekSet);
       }
       // Align virtual memory address.
       if ((header_.sh_flags & SHF_ALLOC) != 0) {
@@ -140,7 +139,7 @@ class ElfBuilder FINAL {
         CHECK_GT(header_.sh_size, 0u);
       } else {
         // Use the current file position to determine section size.
-        off_t file_offset = owner_->Seek(0, kSeekCurrent);
+        off_t file_offset = owner_->stream_.Seek(0, kSeekCurrent);
         CHECK_GE(file_offset, (off_t)header_.sh_offset);
         header_.sh_size = file_offset - header_.sh_offset;
       }
@@ -162,7 +161,7 @@ class ElfBuilder FINAL {
       } else {
         CHECK(started_);
         CHECK_NE(header_.sh_type, (Elf_Word)SHT_NOBITS);
-        return owner_->Seek(0, kSeekCurrent) - header_.sh_offset;
+        return owner_->stream_.Seek(0, kSeekCurrent) - header_.sh_offset;
       }
     }
@@ -177,21 +176,20 @@ class ElfBuilder FINAL {
     bool WriteFully(const void* buffer, size_t byte_count) OVERRIDE {
       CHECK(started_);
       CHECK(!finished_);
-      owner_->WriteFully(buffer, byte_count);
-      return true;
+      return owner_->stream_.WriteFully(buffer, byte_count);
     }

     // This function always succeeds to simplify code.
     // Use builder's Good() to check the actual status.
     off_t Seek(off_t offset, Whence whence) OVERRIDE {
       // Forward the seek as-is and trust the caller to use it reasonably.
-      return owner_->Seek(offset, whence);
+      return owner_->stream_.Seek(offset, whence);
     }

     // This function flushes the output and returns whether it succeeded.
     // If there was a previous failure, this does nothing and returns false, i.e. failed.
     bool Flush() OVERRIDE {
-      return owner_->Flush();
+      return owner_->stream_.Flush();
     }

     Elf_Word GetSectionIndex() const {
@@ -277,26 +275,24 @@ class ElfBuilder FINAL {
   };

   ElfBuilder(InstructionSet isa, OutputStream* output)
-      : isa_(isa),
-        output_(output),
-        output_good_(true),
-        output_offset_(0),
-        rodata_(this, ".rodata", SHT_PROGBITS, SHF_ALLOC, nullptr, 0, kPageSize, 0),
-        text_(this, ".text", SHT_PROGBITS, SHF_ALLOC | SHF_EXECINSTR, nullptr, 0, kPageSize, 0),
-        bss_(this, ".bss", SHT_NOBITS, SHF_ALLOC, nullptr, 0, kPageSize, 0),
-        dynstr_(this, ".dynstr", SHF_ALLOC, kPageSize),
-        dynsym_(this, ".dynsym", SHT_DYNSYM, SHF_ALLOC, &dynstr_),
-        hash_(this, ".hash", SHT_HASH, SHF_ALLOC, &dynsym_, 0, sizeof(Elf_Word), sizeof(Elf_Word)),
-        dynamic_(this, ".dynamic", SHT_DYNAMIC, SHF_ALLOC, &dynstr_, 0, kPageSize, sizeof(Elf_Dyn)),
-        eh_frame_(this, ".eh_frame", SHT_PROGBITS, SHF_ALLOC, nullptr, 0, kPageSize, 0),
-        eh_frame_hdr_(this, ".eh_frame_hdr", SHT_PROGBITS, SHF_ALLOC, nullptr, 0, 4, 0),
-        strtab_(this, ".strtab", 0, kPageSize),
-        symtab_(this, ".symtab", SHT_SYMTAB, 0, &strtab_),
-        debug_frame_(this, ".debug_frame", SHT_PROGBITS, 0, nullptr, 0, sizeof(Elf_Addr), 0),
-        debug_info_(this, ".debug_info", SHT_PROGBITS, 0, nullptr, 0, 1, 0),
-        debug_line_(this, ".debug_line", SHT_PROGBITS, 0, nullptr, 0, 1, 0),
-        shstrtab_(this, ".shstrtab", 0, 1),
-        virtual_address_(0) {
+    : isa_(isa),
+      stream_(output),
+      rodata_(this, ".rodata", SHT_PROGBITS, SHF_ALLOC, nullptr, 0, kPageSize, 0),
+      text_(this, ".text", SHT_PROGBITS, SHF_ALLOC | SHF_EXECINSTR, nullptr, 0, kPageSize, 0),
+      bss_(this, ".bss", SHT_NOBITS, SHF_ALLOC, nullptr, 0, kPageSize, 0),
+      dynstr_(this, ".dynstr", SHF_ALLOC, kPageSize),
+      dynsym_(this, ".dynsym", SHT_DYNSYM, SHF_ALLOC, &dynstr_),
+      hash_(this, ".hash", SHT_HASH, SHF_ALLOC, &dynsym_, 0, sizeof(Elf_Word), sizeof(Elf_Word)),
+      dynamic_(this, ".dynamic", SHT_DYNAMIC, SHF_ALLOC, &dynstr_, 0, kPageSize, sizeof(Elf_Dyn)),
+      eh_frame_(this, ".eh_frame", SHT_PROGBITS, SHF_ALLOC, nullptr, 0, kPageSize, 0),
+      eh_frame_hdr_(this, ".eh_frame_hdr", SHT_PROGBITS, SHF_ALLOC, nullptr, 0, 4, 0),
+      strtab_(this, ".strtab", 0, kPageSize),
+      symtab_(this, ".symtab", SHT_SYMTAB, 0, &strtab_),
+      debug_frame_(this, ".debug_frame", SHT_PROGBITS, 0, nullptr, 0, sizeof(Elf_Addr), 0),
+      debug_info_(this, ".debug_info", SHT_PROGBITS, 0, nullptr, 0, 1, 0),
+      debug_line_(this, ".debug_line", SHT_PROGBITS, 0, nullptr, 0, 1, 0),
+      shstrtab_(this, ".shstrtab", 0, 1),
+      virtual_address_(0) {
     text_.phdr_flags_ = PF_R | PF_X;
     bss_.phdr_flags_ = PF_R | PF_W;
     dynamic_.phdr_flags_ = PF_R | PF_W;
@@ -353,7 +349,7 @@ class ElfBuilder FINAL {
     // We do not know the number of headers until later, so
     // it is easiest to just reserve a fixed amount of space.
     int size = sizeof(Elf_Ehdr) + sizeof(Elf_Phdr) * kMaxProgramHeaders;
-    Seek(size, kSeekSet);
+    stream_.Seek(size, kSeekSet);
     virtual_address_ += size;
   }
@@ -377,9 +373,14 @@ class ElfBuilder FINAL {
       shdrs.push_back(section->header_);
     }
     Elf_Off section_headers_offset;
-    section_headers_offset = RoundUp(Seek(0, kSeekCurrent), sizeof(Elf_Off));
-    Seek(section_headers_offset, kSeekSet);
-    WriteFully(shdrs.data(), shdrs.size() * sizeof(shdrs[0]));
+    section_headers_offset = RoundUp(stream_.Seek(0, kSeekCurrent), sizeof(Elf_Off));
+    stream_.Seek(section_headers_offset, kSeekSet);
+    stream_.WriteFully(shdrs.data(), shdrs.size() * sizeof(shdrs[0]));
+
+    // Flush everything else before writing the program headers. This should prevent
+    // the OS from reordering writes, so that we don't end up with valid headers
+    // and partially written data if we suddenly lose power, for example.
+    stream_.Flush();

     // Write the initial file headers.
     std::vector<Elf_Phdr> phdrs = MakeProgramHeaders();
@@ -389,10 +390,10 @@ class ElfBuilder FINAL {
     elf_header.e_phnum = phdrs.size();
     elf_header.e_shnum = shdrs.size();
     elf_header.e_shstrndx = shstrtab_.GetSectionIndex();
-    Seek(0, kSeekSet);
-    WriteFully(&elf_header, sizeof(elf_header));
-    WriteFully(phdrs.data(), phdrs.size() * sizeof(phdrs[0]));
-    Flush();
+    stream_.Seek(0, kSeekSet);
+    stream_.WriteFully(&elf_header, sizeof(elf_header));
+    stream_.WriteFully(phdrs.data(), phdrs.size() * sizeof(phdrs[0]));
+    stream_.Flush();
   }

   // The running program does not have access to section headers
@@ -470,60 +471,15 @@ class ElfBuilder FINAL {
   // Returns true if all writes and seeks on the output stream succeeded.
   bool Good() {
-    return output_good_;
+    return stream_.Good();
   }

- private:
-  // This function always succeeds to simplify code.
-  // Use Good() to check the actual status of the output stream.
-  void WriteFully(const void* buffer, size_t byte_count) {
-    if (output_good_) {
-      if (!output_->WriteFully(buffer, byte_count)) {
-        PLOG(ERROR) << "Failed to write " << byte_count
-                    << " bytes to ELF file at offset " << output_offset_;
-        output_good_ = false;
-      }
-    }
-    output_offset_ += byte_count;
-  }
-
-  // This function always succeeds to simplify code.
-  // Use Good() to check the actual status of the output stream.
-  off_t Seek(off_t offset, Whence whence) {
-    // We keep shadow copy of the offset so that we return
-    // the expected value even if the output stream failed.
-    off_t new_offset;
-    switch (whence) {
-      case kSeekSet:
-        new_offset = offset;
-        break;
-      case kSeekCurrent:
-        new_offset = output_offset_ + offset;
-        break;
-      default:
-        LOG(FATAL) << "Unsupported seek type: " << whence;
-        UNREACHABLE();
-    }
-    if (output_good_) {
-      off_t actual_offset = output_->Seek(offset, whence);
-      if (actual_offset == (off_t)-1) {
-        PLOG(ERROR) << "Failed to seek in ELF file. Offset=" << offset
-                    << " whence=" << whence << " new_offset=" << new_offset;
-        output_good_ = false;
-      }
-      DCHECK_EQ(actual_offset, new_offset);
-    }
-    output_offset_ = new_offset;
-    return new_offset;
-  }
-
-  bool Flush() {
-    if (output_good_) {
-      output_good_ = output_->Flush();
-    }
-    return output_good_;
+  // Returns the builder's internal stream.
+  OutputStream* GetStream() {
+    return &stream_;
   }

+ private:
   static Elf_Ehdr MakeElfHeader(InstructionSet isa) {
     Elf_Ehdr elf_header = Elf_Ehdr();
     switch (isa) {
@@ -675,9 +631,7 @@ class ElfBuilder FINAL {
   InstructionSet isa_;

-  OutputStream* output_;
-  bool output_good_;  // True if all writes to output succeeded.
-  off_t output_offset_;  // Keep track of the current position in the stream.
+  ErrorDelayingOutputStream stream_;

   Section rodata_;
   Section text_;
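The elf_builder.h hunks above replace the builder's private WriteFully/Seek/Flush wrappers and their output_good_/output_offset_ bookkeeping with the ErrorDelayingOutputStream introduced later in this patch. The calling convention that results is roughly the following sketch; WriteHeaderAndData is a hypothetical helper (not part of the patch) using only the stream API visible in this diff:

    // Minimal sketch: writes and seeks "always succeed"; a failure is latched
    // inside the stream and surfaces once, at the end.
    bool WriteHeaderAndData(ErrorDelayingOutputStream* stream,
                            const void* header, size_t header_size,
                            const void* data, size_t data_size) {
      stream->Seek(0, kSeekSet);                // Returns the expected offset even after a failure.
      stream->WriteFully(header, header_size);  // No per-call error handling needed.
      stream->WriteFully(data, data_size);
      stream->Flush();                          // First point where a delayed error is reported.
      return stream->Good();                    // Single status check replaces one per call.
    }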
diff --git a/compiler/elf_writer.h b/compiler/elf_writer.h
index 357d5f624e..c5a0fd50bd 100644
--- a/compiler/elf_writer.h
+++ b/compiler/elf_writer.h
@@ -62,6 +62,11 @@ class ElfWriter {
   virtual void WritePatchLocations(const ArrayRef<const uintptr_t>& patch_locations) = 0;
   virtual bool End() = 0;

+  // Get the ELF writer's stream. This stream can be used for writing data directly
+  // to a section after the section has been finished. When that's done, the user
+  // should Seek() back to the position where the stream was before this operation.
+  virtual OutputStream* GetStream() = 0;
+
  protected:
   ElfWriter() = default;
 };
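The new GetStream() accessor comes with a usage contract spelled out in the comment above. A minimal sketch of a conforming caller (PatchFinishedSection is hypothetical, used only to illustrate the save/seek/write/restore discipline) might look like:

    // Write into an already-finished section, then restore the stream position.
    void PatchFinishedSection(ElfWriter* elf_writer, off_t section_offset,
                              const void* data, size_t size) {
      OutputStream* stream = elf_writer->GetStream();
      off_t saved_offset = stream->Seek(0, kSeekCurrent);  // Remember where we were.
      stream->Seek(section_offset, kSeekSet);              // Jump into the finished section.
      stream->WriteFully(data, size);
      stream->Seek(saved_offset, kSeekSet);                // Contract: seek back when done.
    }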
diff --git a/compiler/elf_writer_quick.cc b/compiler/elf_writer_quick.cc
index 9da2af8d3e..e411496980 100644
--- a/compiler/elf_writer_quick.cc
+++ b/compiler/elf_writer_quick.cc
@@ -31,6 +31,8 @@
 #include "elf_writer_debug.h"
 #include "globals.h"
 #include "leb128.h"
+#include "linker/buffered_output_stream.h"
+#include "linker/file_output_stream.h"
 #include "utils.h"

 namespace art {
@@ -72,6 +74,8 @@ class ElfWriterQuick FINAL : public ElfWriter {
   void WritePatchLocations(const ArrayRef<const uintptr_t>& patch_locations) OVERRIDE;
   bool End() OVERRIDE;

+  virtual OutputStream* GetStream() OVERRIDE;
+
   static void EncodeOatPatches(const std::vector<uintptr_t>& locations,
                                std::vector<uint8_t>* buffer);

@@ -191,6 +195,11 @@ bool ElfWriterQuick<ElfTypes>::End() {
 }

 template <typename ElfTypes>
+OutputStream* ElfWriterQuick<ElfTypes>::GetStream() {
+  return builder_->GetStream();
+}
+
+template <typename ElfTypes>
 static void WriteDebugSymbols(ElfBuilder<ElfTypes>* builder,
                               const ArrayRef<const dwarf::MethodDebugInfo>& method_infos) {
   bool generated_mapping_symbol = false;
diff --git a/compiler/image_test.cc b/compiler/image_test.cc
index 5f4a92299b..cda6240bd4 100644
--- a/compiler/image_test.cc
+++ b/compiler/image_test.cc
@@ -34,7 +34,6 @@
 #include "scoped_thread_state_change.h"
 #include "signal_catcher.h"
 #include "utils.h"
-#include "vector_output_stream.h"

 namespace art {
diff --git a/compiler/image_writer.cc b/compiler/image_writer.cc
index 341742e4dc..bf1fcdd5f5 100644
--- a/compiler/image_writer.cc
+++ b/compiler/image_writer.cc
@@ -540,7 +540,10 @@ ImageWriter::BinSlot ImageWriter::GetImageBinSlot(mirror::Object* object) const
 }

 bool ImageWriter::AllocMemory() {
-  const size_t length = RoundUp(image_objects_offset_begin_ + GetBinSizeSum() + intern_table_bytes_,
+  const size_t length = RoundUp(image_objects_offset_begin_ +
+                                GetBinSizeSum() +
+                                intern_table_bytes_ +
+                                class_table_bytes_,
                                 kPageSize);
   std::string error_msg;
   image_.reset(MemMap::MapAnonymous("image writer image",
@@ -1030,6 +1033,14 @@ void ImageWriter::WalkFieldsInOrder(mirror::Object* obj) {
           WalkFieldsInOrder(value);
         }
       }
+    } else if (h_obj->IsClassLoader()) {
+      // Register the class loader if it has a class table.
+      // The fake boot class loader should not get registered and we should end up with only one
+      // class loader.
+      mirror::ClassLoader* class_loader = h_obj->AsClassLoader();
+      if (class_loader->GetClassTable() != nullptr) {
+        class_loaders_.insert(class_loader);
+      }
     }
   }
 }
@@ -1154,10 +1165,26 @@ void ImageWriter::CalculateNewObjectOffsets() {
   }

   // Calculate how big the intern table will be after being serialized.
-  auto* const intern_table = Runtime::Current()->GetInternTable();
+  InternTable* const intern_table = runtime->GetInternTable();
   CHECK_EQ(intern_table->WeakSize(), 0u) << " should have strong interned all the strings";
   intern_table_bytes_ = intern_table->WriteToMemory(nullptr);

+  // Write out the class table.
+  ClassLinker* class_linker = runtime->GetClassLinker();
+  if (boot_image_space_ == nullptr) {
+    // Compiling the boot image, add null class loader.
+    class_loaders_.insert(nullptr);
+  }
+  if (!class_loaders_.empty()) {
+    CHECK_EQ(class_loaders_.size(), 1u) << "Should only have one real class loader in the image";
+    ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
+    for (mirror::ClassLoader* loader : class_loaders_) {
+      ClassTable* table = class_linker->ClassTableForClassLoader(loader);
+      CHECK(table != nullptr);
+      class_table_bytes_ += table->WriteToMemory(nullptr);
+    }
+  }
+
   // Note that image_end_ is left at end of used mirror object section.
 }
@@ -1199,6 +1226,12 @@ void ImageWriter::CreateHeader(size_t oat_loaded_size, size_t oat_data_offset) {
   auto* interned_strings_section = &sections[ImageHeader::kSectionInternedStrings];
   *interned_strings_section = ImageSection(cur_pos, intern_table_bytes_);
   cur_pos = interned_strings_section->End();
+  // Calculate the size of the class table section.
+  auto* class_table_section = &sections[ImageHeader::kSectionClassTable];
+  *class_table_section = ImageSection(cur_pos, class_table_bytes_);
+  cur_pos = class_table_section->End();
+  // Image end goes right before the start of the image bitmap.
+  const size_t image_end = static_cast<uint32_t>(cur_pos);
   // Finally bitmap section.
   const size_t bitmap_bytes = image_bitmap_->Size();
   auto* bitmap_section = &sections[ImageHeader::kSectionImageBitmap];
@@ -1212,7 +1245,6 @@ void ImageWriter::CreateHeader(size_t oat_loaded_size, size_t oat_data_offset) {
     }
     LOG(INFO) << "Methods: clean=" << clean_methods_ << " dirty=" << dirty_methods_;
   }
-  const size_t image_end = static_cast<uint32_t>(interned_strings_section->End());
   CHECK_EQ(AlignUp(image_begin_ + image_end, kPageSize), oat_file_begin)
       << "Oat file should be right after the image.";
   // Create the header.
@@ -1323,23 +1355,48 @@ void ImageWriter::CopyAndFixupNativeData() {
     }
     image_header->SetImageMethod(static_cast<ImageHeader::ImageMethod>(i), method);
   }
+  FixupRootVisitor root_visitor(this);
+
   // Write the intern table into the image.
   const ImageSection& intern_table_section = image_header->GetImageSection(
       ImageHeader::kSectionInternedStrings);
-  InternTable* const intern_table = Runtime::Current()->GetInternTable();
-  uint8_t* const memory_ptr = image_->Begin() + intern_table_section.Offset();
-  const size_t intern_table_bytes = intern_table->WriteToMemory(memory_ptr);
+  Runtime* const runtime = Runtime::Current();
+  InternTable* const intern_table = runtime->GetInternTable();
+  uint8_t* const intern_table_memory_ptr = image_->Begin() + intern_table_section.Offset();
+  const size_t intern_table_bytes = intern_table->WriteToMemory(intern_table_memory_ptr);
+  CHECK_EQ(intern_table_bytes, intern_table_bytes_);
   // Fixup the pointers in the newly written intern table to contain image addresses.
-  InternTable temp_table;
+  InternTable temp_intern_table;
   // Note that we require that ReadFromMemory does not make an internal copy of the elements so that
   // the VisitRoots() will update the memory directly rather than the copies.
   // This also relies on visit roots not doing any verification which could fail after we update
   // the roots to be the image addresses.
-  temp_table.ReadFromMemory(memory_ptr);
-  CHECK_EQ(temp_table.Size(), intern_table->Size());
-  FixupRootVisitor visitor(this);
-  temp_table.VisitRoots(&visitor, kVisitRootFlagAllRoots);
-  CHECK_EQ(intern_table_bytes, intern_table_bytes_);
+  temp_intern_table.ReadFromMemory(intern_table_memory_ptr);
+  CHECK_EQ(temp_intern_table.Size(), intern_table->Size());
+  temp_intern_table.VisitRoots(&root_visitor, kVisitRootFlagAllRoots);
+
+  // Write the class table(s) into the image.
+  ClassLinker* const class_linker = runtime->GetClassLinker();
+  const ImageSection& class_table_section = image_header->GetImageSection(
+      ImageHeader::kSectionClassTable);
+  uint8_t* const class_table_memory_ptr = image_->Begin() + class_table_section.Offset();
+  ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
+  size_t class_table_bytes = 0;
+  for (mirror::ClassLoader* loader : class_loaders_) {
+    ClassTable* table = class_linker->ClassTableForClassLoader(loader);
+    CHECK(table != nullptr);
+    uint8_t* memory_ptr = class_table_memory_ptr + class_table_bytes;
+    class_table_bytes += table->WriteToMemory(memory_ptr);
+    // Fixup the pointers in the newly written class table to contain image addresses. See
+    // above comment for intern tables.
+    ClassTable temp_class_table;
+    temp_class_table.ReadFromMemory(memory_ptr);
+    // CHECK_EQ(temp_class_table.NumNonZygoteClasses(), table->NumNonZygoteClasses());
+    BufferedRootVisitor<kDefaultBufferedRootCount> buffered_visitor(&root_visitor,
+                                                                    RootInfo(kRootUnknown));
+    temp_class_table.VisitRoots(buffered_visitor);
+  }
+  CHECK_EQ(class_table_bytes, class_table_bytes_);
 }

 void ImageWriter::CopyAndFixupObjects() {
@@ -1553,8 +1610,7 @@ void ImageWriter::FixupObject(Object* orig, Object* copy) {
     ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
     if (klass == class_linker->GetClassRoot(ClassLinker::kJavaLangDexCache)) {
       FixupDexCache(down_cast<mirror::DexCache*>(orig), down_cast<mirror::DexCache*>(copy));
-    } else if (klass->IsSubClass(down_cast<mirror::Class*>(
-        class_linker->GetClassRoot(ClassLinker::kJavaLangClassLoader)))) {
+    } else if (klass->IsClassLoaderClass()) {
       // If src is a ClassLoader, set the class table to null so that it gets recreated by the
       // ClassLoader.
       down_cast<mirror::ClassLoader*>(copy)->SetClassTable(nullptr);
@@ -1840,7 +1896,8 @@ uint8_t* ImageWriter::GetOatFileBegin() const {
       bin_slot_sizes_[kBinArtMethodDirty] +
       bin_slot_sizes_[kBinArtMethodClean] +
       bin_slot_sizes_[kBinDexCacheArray] +
-      intern_table_bytes_;
+      intern_table_bytes_ +
+      class_table_bytes_;
   return image_begin_ + RoundUp(image_end_ + native_sections_size, kPageSize);
 }
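Both the intern table and the new class table section rely on the same two-pass scheme visible in the hunks above: WriteToMemory(nullptr) returns the serialized size without writing, so the layout can be computed in CalculateNewObjectOffsets() and then verified when the table is actually serialized. Schematically (not literal patch code):

    // Pass 1, at layout time: a null target means "measure only".
    size_t measured = table->WriteToMemory(nullptr);
    // ... reserve `measured` bytes for the section in the image layout ...
    // Pass 2, at copy time: serialize into the reserved section.
    size_t written = table->WriteToMemory(memory_ptr);
    CHECK_EQ(written, measured);  // Layout and serialization must agree exactly.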
diff --git a/compiler/image_writer.h b/compiler/image_writer.h
index 889cd10dc4..386838fde0 100644
--- a/compiler/image_writer.h
+++ b/compiler/image_writer.h
@@ -72,7 +72,8 @@ class ImageWriter FINAL {
         intern_table_bytes_(0u),
         image_method_array_(ImageHeader::kImageMethodsCount),
         dirty_methods_(0u),
-        clean_methods_(0u) {
+        clean_methods_(0u),
+        class_table_bytes_(0u) {
     CHECK_NE(image_begin, 0U);
     std::fill_n(image_methods_, arraysize(image_methods_), nullptr);
     std::fill_n(oat_address_offsets_, arraysize(oat_address_offsets_), 0);
@@ -453,6 +454,12 @@ class ImageWriter FINAL {
   // Prune class memoization table to speed up ContainsBootClassLoaderNonImageClass.
   std::unordered_map<mirror::Class*, bool> prune_class_memo_;

+  // Class loaders with a class table to write out. Should only be one currently.
+  std::unordered_set<mirror::ClassLoader*> class_loaders_;
+
+  // Number of image class table bytes.
+  size_t class_table_bytes_;
+
   friend class ContainsBootClassLoaderNonImageClassVisitor;
   friend class FixupClassVisitor;
   friend class FixupRootVisitor;
diff --git a/compiler/jit/jit_compiler.cc b/compiler/jit/jit_compiler.cc
index 2125c9a26a..d001495442 100644
--- a/compiler/jit/jit_compiler.cc
+++ b/compiler/jit/jit_compiler.cc
@@ -170,18 +170,6 @@ bool JitCompiler::CompileMethod(Thread* self, ArtMethod* method) {
   self->AssertNoPendingException();
   Runtime* runtime = Runtime::Current();

-  // Check if the method is already compiled.
-  if (runtime->GetJit()->GetCodeCache()->ContainsPc(method->GetEntryPointFromQuickCompiledCode())) {
-    VLOG(jit) << "Already compiled " << PrettyMethod(method);
-    return true;
-  }
-
-  // Don't compile the method if we are supposed to be deoptimized.
-  instrumentation::Instrumentation* instrumentation = runtime->GetInstrumentation();
-  if (instrumentation->AreAllMethodsDeoptimized() || instrumentation->IsDeoptimized(method)) {
-    return false;
-  }
-
   // Ensure the class is initialized.
   Handle<mirror::Class> h_class(hs.NewHandle(method->GetDeclaringClass()));
   if (!runtime->GetClassLinker()->EnsureInitialized(self, h_class, true, true)) {
@@ -190,13 +178,13 @@ bool JitCompiler::CompileMethod(Thread* self, ArtMethod* method) {
   }

   // Do the compilation.
-  JitCodeCache* const code_cache = runtime->GetJit()->GetCodeCache();
   bool success = false;
   {
     TimingLogger::ScopedTiming t2("Compiling", &logger);
     // If we get a request to compile a proxy method, we pass the actual Java method
     // of that proxy method, as the compiler does not expect a proxy method.
     ArtMethod* method_to_compile = method->GetInterfaceMethodIfProxy(sizeof(void*));
+    JitCodeCache* const code_cache = runtime->GetJit()->GetCodeCache();
     success = compiler_driver_->GetCompiler()->JitCompile(self, code_cache, method_to_compile);
   }
diff --git a/compiler/linker/arm/relative_patcher_arm_base.cc b/compiler/linker/arm/relative_patcher_arm_base.cc
index 13754fdaa1..73b0facf4b 100644
--- a/compiler/linker/arm/relative_patcher_arm_base.cc
+++ b/compiler/linker/arm/relative_patcher_arm_base.cc
@@ -17,9 +17,9 @@
 #include "linker/arm/relative_patcher_arm_base.h"

 #include "compiled_method.h"
+#include "linker/output_stream.h"
 #include "oat.h"
 #include "oat_quick_method_header.h"
-#include "output_stream.h"

 namespace art {
 namespace linker {
diff --git a/compiler/linker/arm64/relative_patcher_arm64.cc b/compiler/linker/arm64/relative_patcher_arm64.cc
index 57018af840..3d4c2184f1 100644
--- a/compiler/linker/arm64/relative_patcher_arm64.cc
+++ b/compiler/linker/arm64/relative_patcher_arm64.cc
@@ -20,10 +20,10 @@
 #include "art_method.h"
 #include "compiled_method.h"
 #include "driver/compiler_driver.h"
-#include "utils/arm64/assembler_arm64.h"
+#include "linker/output_stream.h"
 #include "oat.h"
 #include "oat_quick_method_header.h"
-#include "output_stream.h"
+#include "utils/arm64/assembler_arm64.h"

 namespace art {
 namespace linker {
diff --git a/compiler/buffered_output_stream.cc b/compiler/linker/buffered_output_stream.cc
index 4c66c764a9..4c66c764a9 100644
--- a/compiler/buffered_output_stream.cc
+++ b/compiler/linker/buffered_output_stream.cc
diff --git a/compiler/buffered_output_stream.h b/compiler/linker/buffered_output_stream.h
index 1da3a6932f..a2eefbbf17 100644
--- a/compiler/buffered_output_stream.h
+++ b/compiler/linker/buffered_output_stream.h
@@ -14,8 +14,8 @@
  * limitations under the License.
  */

-#ifndef ART_COMPILER_BUFFERED_OUTPUT_STREAM_H_
-#define ART_COMPILER_BUFFERED_OUTPUT_STREAM_H_
+#ifndef ART_COMPILER_LINKER_BUFFERED_OUTPUT_STREAM_H_
+#define ART_COMPILER_LINKER_BUFFERED_OUTPUT_STREAM_H_

 #include <memory>

@@ -51,4 +51,4 @@ class BufferedOutputStream FINAL : public OutputStream {

 }  // namespace art

-#endif  // ART_COMPILER_BUFFERED_OUTPUT_STREAM_H_
+#endif  // ART_COMPILER_LINKER_BUFFERED_OUTPUT_STREAM_H_
diff --git a/compiler/linker/error_delaying_output_stream.h b/compiler/linker/error_delaying_output_stream.h
new file mode 100644
index 0000000000..99410e4bb1
--- /dev/null
+++ b/compiler/linker/error_delaying_output_stream.h
@@ -0,0 +1,101 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_COMPILER_LINKER_ERROR_DELAYING_OUTPUT_STREAM_H_
+#define ART_COMPILER_LINKER_ERROR_DELAYING_OUTPUT_STREAM_H_
+
+#include "output_stream.h"
+
+#include "base/logging.h"
+
+namespace art {
+
+// OutputStream wrapper that delays reporting an error until Flush().
+class ErrorDelayingOutputStream FINAL : public OutputStream {
+ public:
+  explicit ErrorDelayingOutputStream(OutputStream* output)
+      : OutputStream(output->GetLocation()),
+        output_(output),
+        output_good_(true),
+        output_offset_(0) { }
+
+  // This function always succeeds to simplify code.
+  // Use Good() to check the actual status of the output stream.
+  bool WriteFully(const void* buffer, size_t byte_count) OVERRIDE {
+    if (output_good_) {
+      if (!output_->WriteFully(buffer, byte_count)) {
+        PLOG(ERROR) << "Failed to write " << byte_count
+                    << " bytes to " << GetLocation() << " at offset " << output_offset_;
+        output_good_ = false;
+      }
+    }
+    output_offset_ += byte_count;
+    return true;
+  }
+
+  // This function always succeeds to simplify code.
+  // Use Good() to check the actual status of the output stream.
+  off_t Seek(off_t offset, Whence whence) OVERRIDE {
+    // We keep shadow copy of the offset so that we return
+    // the expected value even if the output stream failed.
+    off_t new_offset;
+    switch (whence) {
+      case kSeekSet:
+        new_offset = offset;
+        break;
+      case kSeekCurrent:
+        new_offset = output_offset_ + offset;
+        break;
+      default:
+        LOG(FATAL) << "Unsupported seek type: " << whence;
+        UNREACHABLE();
+    }
+    if (output_good_) {
+      off_t actual_offset = output_->Seek(offset, whence);
+      if (actual_offset == static_cast<off_t>(-1)) {
+        PLOG(ERROR) << "Failed to seek in " << GetLocation() << ". Offset=" << offset
+                    << " whence=" << whence << " new_offset=" << new_offset;
+        output_good_ = false;
+      }
+      DCHECK_EQ(actual_offset, new_offset);
+    }
+    output_offset_ = new_offset;
+    return new_offset;
+  }
+
+  // Flush the output and return whether all operations have succeeded.
+  // Do nothing if we already have a pending error.
+  bool Flush() OVERRIDE {
+    if (output_good_) {
+      output_good_ = output_->Flush();
+    }
+    return output_good_;
+  }
+
+  // Check (without flushing) whether all operations have succeeded so far.
+  bool Good() const {
+    return output_good_;
+  }
+
+ private:
+  OutputStream* output_;
+  bool output_good_;  // True if all writes to output succeeded.
+  off_t output_offset_;  // Keep track of the current position in the stream.
+};
+
+}  // namespace art
+
+#endif  // ART_COMPILER_LINKER_ERROR_DELAYING_OUTPUT_STREAM_H_
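Standalone use of the new wrapper could look like the sketch below. WriteBlobs is hypothetical, and the FileOutputStream(File*) constructor is assumed from the renamed header rather than shown in this diff; the point of the class is that straight-line output code needs no per-call branches, only a final check.

    #include "linker/error_delaying_output_stream.h"
    #include "linker/file_output_stream.h"

    bool WriteBlobs(File* file, const std::vector<std::vector<uint8_t>>& blobs) {
      FileOutputStream file_stream(file);
      ErrorDelayingOutputStream stream(&file_stream);
      for (const std::vector<uint8_t>& blob : blobs) {
        stream.WriteFully(blob.data(), blob.size());  // "Always succeeds"; any error is latched.
      }
      return stream.Flush();  // Reports the first delayed error, if any occurred.
    }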
diff --git a/compiler/file_output_stream.cc b/compiler/linker/file_output_stream.cc
index bbfbdfdca8..bbfbdfdca8 100644
--- a/compiler/file_output_stream.cc
+++ b/compiler/linker/file_output_stream.cc
diff --git a/compiler/file_output_stream.h b/compiler/linker/file_output_stream.h
index 6917d83f29..f2d845379f 100644
--- a/compiler/file_output_stream.h
+++ b/compiler/linker/file_output_stream.h
@@ -14,8 +14,8 @@
  * limitations under the License.
  */

-#ifndef ART_COMPILER_FILE_OUTPUT_STREAM_H_
-#define ART_COMPILER_FILE_OUTPUT_STREAM_H_
+#ifndef ART_COMPILER_LINKER_FILE_OUTPUT_STREAM_H_
+#define ART_COMPILER_LINKER_FILE_OUTPUT_STREAM_H_

 #include "output_stream.h"

@@ -43,4 +43,4 @@ class FileOutputStream FINAL : public OutputStream {

 }  // namespace art

-#endif  // ART_COMPILER_FILE_OUTPUT_STREAM_H_
+#endif  // ART_COMPILER_LINKER_FILE_OUTPUT_STREAM_H_
diff --git a/compiler/output_stream.cc b/compiler/linker/output_stream.cc
index a8b64ca1ce..a8b64ca1ce 100644
--- a/compiler/output_stream.cc
+++ b/compiler/linker/output_stream.cc
diff --git a/compiler/output_stream.h b/compiler/linker/output_stream.h
index 8f6b6d8235..96a5f489f0 100644
--- a/compiler/output_stream.h
+++ b/compiler/linker/output_stream.h
@@ -14,8 +14,8 @@
  * limitations under the License.
  */

-#ifndef ART_COMPILER_OUTPUT_STREAM_H_
-#define ART_COMPILER_OUTPUT_STREAM_H_
+#ifndef ART_COMPILER_LINKER_OUTPUT_STREAM_H_
+#define ART_COMPILER_LINKER_OUTPUT_STREAM_H_

 #include <ostream>
 #include <string>

@@ -61,4 +61,4 @@ class OutputStream {

 }  // namespace art

-#endif  // ART_COMPILER_OUTPUT_STREAM_H_
+#endif  // ART_COMPILER_LINKER_OUTPUT_STREAM_H_
diff --git a/compiler/output_stream_test.cc b/compiler/linker/output_stream_test.cc
index 84c76f2c6c..84c76f2c6c 100644
--- a/compiler/output_stream_test.cc
+++ b/compiler/linker/output_stream_test.cc
diff --git a/compiler/vector_output_stream.cc b/compiler/linker/vector_output_stream.cc
index 3d33673e1a..f758005c52 100644
--- a/compiler/vector_output_stream.cc
+++ b/compiler/linker/vector_output_stream.cc
@@ -21,7 +21,7 @@
 namespace art {

 VectorOutputStream::VectorOutputStream(const std::string& location, std::vector<uint8_t>* vector)
-  : OutputStream(location), offset_(vector->size()), vector_(vector) {}
+    : OutputStream(location), offset_(vector->size()), vector_(vector) {}

 off_t VectorOutputStream::Seek(off_t offset, Whence whence) {
   CHECK(whence == kSeekSet || whence == kSeekCurrent || whence == kSeekEnd) << whence;
diff --git a/compiler/vector_output_stream.h b/compiler/linker/vector_output_stream.h
index a3c58d0800..321014374e 100644
--- a/compiler/vector_output_stream.h
+++ b/compiler/linker/vector_output_stream.h
@@ -14,8 +14,8 @@
  * limitations under the License.
  */

-#ifndef ART_COMPILER_VECTOR_OUTPUT_STREAM_H_
-#define ART_COMPILER_VECTOR_OUTPUT_STREAM_H_
+#ifndef ART_COMPILER_LINKER_VECTOR_OUTPUT_STREAM_H_
+#define ART_COMPILER_LINKER_VECTOR_OUTPUT_STREAM_H_

 #include "output_stream.h"

@@ -66,4 +66,4 @@ class VectorOutputStream FINAL : public OutputStream {

 }  // namespace art

-#endif  // ART_COMPILER_VECTOR_OUTPUT_STREAM_H_
+#endif  // ART_COMPILER_LINKER_VECTOR_OUTPUT_STREAM_H_
diff --git a/compiler/oat_test.cc b/compiler/oat_test.cc
index c305b120cb..b8610d0a7a 100644
--- a/compiler/oat_test.cc
+++ b/compiler/oat_test.cc
@@ -31,13 +31,13 @@
 #include "elf_writer.h"
 #include "elf_writer_quick.h"
 #include "entrypoints/quick/quick_entrypoints.h"
+#include "linker/vector_output_stream.h"
 #include "mirror/class-inl.h"
 #include "mirror/object_array-inl.h"
 #include "mirror/object-inl.h"
 #include "oat_file-inl.h"
 #include "oat_writer.h"
 #include "scoped_thread_state_change.h"
-#include "vector_output_stream.h"

 namespace art {
diff --git a/compiler/oat_writer.cc b/compiler/oat_writer.cc
index 901580079a..e8e775f2c8 100644
--- a/compiler/oat_writer.cc
+++ b/compiler/oat_writer.cc
@@ -36,6 +36,7 @@
 #include "gc/space/space.h"
 #include "handle_scope-inl.h"
 #include "image_writer.h"
+#include "linker/output_stream.h"
 #include "linker/relative_patcher.h"
 #include "mirror/array.h"
 #include "mirror/class_loader.h"
@@ -43,7 +44,6 @@
 #include "mirror/object-inl.h"
 #include "oat_quick_method_header.h"
 #include "os.h"
-#include "output_stream.h"
 #include "safe_map.h"
 #include "scoped_thread_state_change.h"
 #include "type_lookup_table.h"
diff --git a/compiler/optimizing/bounds_check_elimination.cc b/compiler/optimizing/bounds_check_elimination.cc
index a44830207d..7dbfd7c58e 100644
--- a/compiler/optimizing/bounds_check_elimination.cc
+++ b/compiler/optimizing/bounds_check_elimination.cc
@@ -1228,19 +1228,26 @@ class BCEVisitor : public HGraphVisitor {
     InductionVarRange::Value v2;
     bool needs_finite_test = false;
     induction_range_.GetInductionRange(context, index, &v1, &v2, &needs_finite_test);
-    if (v1.is_known && (v1.a_constant == 0 || v1.a_constant == 1) &&
-        v2.is_known && (v2.a_constant == 0 || v2.a_constant == 1)) {
-      DCHECK(v1.a_constant == 1 || v1.instruction == nullptr);
-      DCHECK(v2.a_constant == 1 || v2.instruction == nullptr);
-      ValueRange index_range(GetGraph()->GetArena(),
-                             ValueBound(v1.instruction, v1.b_constant),
-                             ValueBound(v2.instruction, v2.b_constant));
-      // If analysis reveals a certain OOB, disable dynamic BCE.
-      *try_dynamic_bce = !index_range.GetLower().LessThan(array_range->GetLower()) &&
-                         !index_range.GetUpper().GreaterThan(array_range->GetUpper());
-      // Use analysis for static bce only if loop is finite.
-      return !needs_finite_test && index_range.FitsIn(array_range);
-    }
+    do {
+      if (v1.is_known && (v1.a_constant == 0 || v1.a_constant == 1) &&
+          v2.is_known && (v2.a_constant == 0 || v2.a_constant == 1)) {
+        DCHECK(v1.a_constant == 1 || v1.instruction == nullptr);
+        DCHECK(v2.a_constant == 1 || v2.instruction == nullptr);
+        ValueRange index_range(GetGraph()->GetArena(),
+                               ValueBound(v1.instruction, v1.b_constant),
+                               ValueBound(v2.instruction, v2.b_constant));
+        // If analysis reveals a certain OOB, disable dynamic BCE.
+        if (index_range.GetLower().LessThan(array_range->GetLower()) ||
+            index_range.GetUpper().GreaterThan(array_range->GetUpper())) {
+          *try_dynamic_bce = false;
+          return false;
+        }
+        // Use analysis for static bce only if loop is finite.
+        if (!needs_finite_test && index_range.FitsIn(array_range)) {
+          return true;
+        }
+      }
+    } while (induction_range_.RefineOuter(&v1, &v2));
     return false;
   }
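The bounds_check_elimination.cc rewrite above turns a single-shot range test into a refinement loop: each RefineOuter() call replaces v1/v2 with ranges derived from the next outer loop's induction information and returns true only when something changed, so the do/while terminates at a fixed point. Distilled skeleton (the real body also handles the certain-OOB and finite-loop cases shown in the diff):

    induction_range_.GetInductionRange(context, index, &v1, &v2, &needs_finite_test);
    do {
      // ... try to prove the bounds check redundant with the current (v1, v2) ...
    } while (induction_range_.RefineOuter(&v1, &v2));  // False once no further refinement.
    return false;  // All refinement levels exhausted without a proof.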
diff --git a/compiler/optimizing/induction_var_range.cc b/compiler/optimizing/induction_var_range.cc
index 2ac1e152e1..9d0cde7c9f 100644
--- a/compiler/optimizing/induction_var_range.cc
+++ b/compiler/optimizing/induction_var_range.cc
@@ -119,6 +119,17 @@ void InductionVarRange::GetInductionRange(HInstruction* context,
   }
 }

+bool InductionVarRange::RefineOuter(/*in-out*/Value* min_val, /*in-out*/Value* max_val) {
+  Value v1 = RefineOuter(*min_val, /* is_min */ true);
+  Value v2 = RefineOuter(*max_val, /* is_min */ false);
+  if (v1.instruction != min_val->instruction || v2.instruction != max_val->instruction) {
+    *min_val = v1;
+    *max_val = v2;
+    return true;
+  }
+  return false;
+}
+
 bool InductionVarRange::CanGenerateCode(HInstruction* context,
                                         HInstruction* instruction,
                                         /*out*/bool* needs_finite_test,
@@ -202,6 +213,8 @@ InductionVarRange::Value InductionVarRange::GetFetch(HInstruction* instruction,
     } else if (IsIntAndGet(instruction->InputAt(1), &value)) {
       return AddValue(GetFetch(instruction->InputAt(0), trip, in_body, is_min), Value(value));
     }
+  } else if (instruction->IsArrayLength() && instruction->InputAt(0)->IsNewArray()) {
+    return GetFetch(instruction->InputAt(0)->InputAt(0), trip, in_body, is_min);
   } else if (is_min) {
     // Special case for finding minimum: minimum of trip-count in loop-body is 1.
     if (trip != nullptr && in_body && instruction == trip->op_a->fetch) {
@@ -404,6 +417,25 @@ InductionVarRange::Value InductionVarRange::MergeVal(Value v1, Value v2, bool is
   return Value();
 }

+InductionVarRange::Value InductionVarRange::RefineOuter(Value v, bool is_min) {
+  if (v.instruction != nullptr) {
+    HLoopInformation* loop =
+        v.instruction->GetBlock()->GetLoopInformation();  // closest enveloping loop
+    if (loop != nullptr) {
+      // Set up loop information.
+      bool in_body = true;  // use is always in body of outer loop
+      HInductionVarAnalysis::InductionInfo* info =
+          induction_analysis_->LookupInfo(loop, v.instruction);
+      HInductionVarAnalysis::InductionInfo* trip =
+          induction_analysis_->LookupInfo(loop, loop->GetHeader()->GetLastInstruction());
+      // Try to refine "a x instruction + b" with outer loop range information on instruction.
+      return AddValue(MulValue(Value(v.a_constant), GetVal(info, trip, in_body, is_min)),
+                      Value(v.b_constant));
+    }
+  }
+  return v;
+}
+
 bool InductionVarRange::GenerateCode(HInstruction* context,
                                      HInstruction* instruction,
                                      HGraph* graph,
diff --git a/compiler/optimizing/induction_var_range.h b/compiler/optimizing/induction_var_range.h
index 7984871b08..71b0b1b4c3 100644
--- a/compiler/optimizing/induction_var_range.h
+++ b/compiler/optimizing/induction_var_range.h
@@ -68,6 +68,9 @@ class InductionVarRange {
                          /*out*/Value* max_val,
                          /*out*/bool* needs_finite_test);

+  /** Refines the values with induction of next outer loop. Returns true on change. */
+  bool RefineOuter(/*in-out*/Value* min_val, /*in-out*/Value* max_val);
+
   /**
    * Returns true if range analysis is able to generate code for the lower and upper
    * bound expressions on the instruction in the given context. The need_finite_test
@@ -149,6 +152,12 @@ class InductionVarRange {
   static Value MergeVal(Value v1, Value v2, bool is_min);

   /**
+   * Returns refined value using induction of next outer loop or the input value if no
+   * further refinement is possible.
+   */
+  Value RefineOuter(Value val, bool is_min);
+
+  /**
    * Generates code for lower/upper/taken-test in the HIR. Returns true on success.
    * With values nullptr, the method can be used to determine if code generation
    * would be successful without generating actual code yet.
diff --git a/compiler/optimizing/induction_var_range_test.cc b/compiler/optimizing/induction_var_range_test.cc
index c2ba157ed8..128b5bb811 100644
--- a/compiler/optimizing/induction_var_range_test.cc
+++ b/compiler/optimizing/induction_var_range_test.cc
@@ -473,16 +473,19 @@ TEST_F(InductionVarRangeTest, ConstantTripCountUp) {
   EXPECT_FALSE(needs_finite_test);
   ExpectEqual(Value(0), v1);
   ExpectEqual(Value(1000), v2);
+  EXPECT_FALSE(range.RefineOuter(&v1, &v2));

   // In context of loop-body: known.
   range.GetInductionRange(increment_, condition_->InputAt(0), &v1, &v2, &needs_finite_test);
   EXPECT_FALSE(needs_finite_test);
   ExpectEqual(Value(0), v1);
   ExpectEqual(Value(999), v2);
+  EXPECT_FALSE(range.RefineOuter(&v1, &v2));

   range.GetInductionRange(increment_, increment_, &v1, &v2, &needs_finite_test);
   EXPECT_FALSE(needs_finite_test);
   ExpectEqual(Value(1), v1);
   ExpectEqual(Value(1000), v2);
+  EXPECT_FALSE(range.RefineOuter(&v1, &v2));
 }

 TEST_F(InductionVarRangeTest, ConstantTripCountDown) {
@@ -498,16 +501,19 @@
   EXPECT_FALSE(needs_finite_test);
   ExpectEqual(Value(0), v1);
   ExpectEqual(Value(1000), v2);
+  EXPECT_FALSE(range.RefineOuter(&v1, &v2));

   // In context of loop-body: known.
   range.GetInductionRange(increment_, condition_->InputAt(0), &v1, &v2, &needs_finite_test);
   EXPECT_FALSE(needs_finite_test);
   ExpectEqual(Value(1), v1);
   ExpectEqual(Value(1000), v2);
+  EXPECT_FALSE(range.RefineOuter(&v1, &v2));

   range.GetInductionRange(increment_, increment_, &v1, &v2, &needs_finite_test);
   EXPECT_FALSE(needs_finite_test);
   ExpectEqual(Value(0), v1);
   ExpectEqual(Value(999), v2);
+  EXPECT_FALSE(range.RefineOuter(&v1, &v2));
 }

 TEST_F(InductionVarRangeTest, SymbolicTripCountUp) {
@@ -527,16 +533,19 @@
   EXPECT_FALSE(needs_finite_test);
   ExpectEqual(Value(0), v1);
   ExpectEqual(Value(), v2);
+  EXPECT_FALSE(range.RefineOuter(&v1, &v2));

   // In context of loop-body: known.
   range.GetInductionRange(increment_, condition_->InputAt(0), &v1, &v2, &needs_finite_test);
   EXPECT_FALSE(needs_finite_test);
   ExpectEqual(Value(0), v1);
   ExpectEqual(Value(parameter, 1, -1), v2);
+  EXPECT_FALSE(range.RefineOuter(&v1, &v2));

   range.GetInductionRange(increment_, increment_, &v1, &v2, &needs_finite_test);
   EXPECT_FALSE(needs_finite_test);
   ExpectEqual(Value(1), v1);
   ExpectEqual(Value(parameter, 1, 0), v2);
+  EXPECT_FALSE(range.RefineOuter(&v1, &v2));

   HInstruction* lower = nullptr;
   HInstruction* upper = nullptr;
@@ -597,16 +606,19 @@ TEST_F(InductionVarRangeTest, SymbolicTripCountDown) {
   EXPECT_FALSE(needs_finite_test);
   ExpectEqual(Value(), v1);
   ExpectEqual(Value(1000), v2);
+  EXPECT_FALSE(range.RefineOuter(&v1, &v2));

   // In context of loop-body: known.
   range.GetInductionRange(increment_, condition_->InputAt(0), &v1, &v2, &needs_finite_test);
   EXPECT_FALSE(needs_finite_test);
   ExpectEqual(Value(parameter, 1, 1), v1);
   ExpectEqual(Value(1000), v2);
+  EXPECT_FALSE(range.RefineOuter(&v1, &v2));

   range.GetInductionRange(increment_, increment_, &v1, &v2, &needs_finite_test);
   EXPECT_FALSE(needs_finite_test);
   ExpectEqual(Value(parameter, 1, 0), v1);
   ExpectEqual(Value(999), v2);
+  EXPECT_FALSE(range.RefineOuter(&v1, &v2));

   HInstruction* lower = nullptr;
   HInstruction* upper = nullptr;
diff --git a/compiler/optimizing/inliner.cc b/compiler/optimizing/inliner.cc
index 6d93be37a7..a4dcb3aeba 100644
--- a/compiler/optimizing/inliner.cc
+++ b/compiler/optimizing/inliner.cc
@@ -171,13 +171,37 @@ static uint32_t FindMethodIndexIn(ArtMethod* method,
                                   const DexFile& dex_file,
                                   uint32_t referrer_index)
     SHARED_REQUIRES(Locks::mutator_lock_) {
-  if (method->GetDexFile()->GetLocation().compare(dex_file.GetLocation()) == 0) {
+  if (IsSameDexFile(*method->GetDexFile(), dex_file)) {
     return method->GetDexMethodIndex();
   } else {
     return method->FindDexMethodIndexInOtherDexFile(dex_file, referrer_index);
   }
 }

+static uint32_t FindClassIndexIn(mirror::Class* cls, const DexFile& dex_file)
+    SHARED_REQUIRES(Locks::mutator_lock_) {
+  if (cls->GetDexCache() == nullptr) {
+    DCHECK(cls->IsArrayClass());
+    // TODO: find the class in `dex_file`.
+    return DexFile::kDexNoIndex;
+  } else if (cls->GetDexTypeIndex() == DexFile::kDexNoIndex16) {
+    // TODO: deal with proxy classes.
+    return DexFile::kDexNoIndex;
+  } else if (IsSameDexFile(cls->GetDexFile(), dex_file)) {
+    // Update the dex cache to ensure the class is in. The generated code will
+    // consider it is. We make it safe by updating the dex cache, as other
+    // dex files might also load the class, and there is no guarantee the dex
+    // cache of the dex file of the class will be updated.
+    if (cls->GetDexCache()->GetResolvedType(cls->GetDexTypeIndex()) == nullptr) {
+      cls->GetDexCache()->SetResolvedType(cls->GetDexTypeIndex(), cls);
+    }
+    return cls->GetDexTypeIndex();
+  } else {
+    // TODO: find the class in `dex_file`.
+    return DexFile::kDexNoIndex;
+  }
+}
+
 bool HInliner::TryInline(HInvoke* invoke_instruction) {
   if (invoke_instruction->IsInvokeUnresolved()) {
     return false;  // Don't bother to move further if we know the method is unresolved.
   }
@@ -214,53 +238,176 @@ bool HInliner::TryInline(HInvoke* invoke_instruction) {
     return false;
   }

-  if (!invoke_instruction->IsInvokeStaticOrDirect()) {
-    resolved_method = FindVirtualOrInterfaceTarget(invoke_instruction, resolved_method);
-    if (resolved_method == nullptr) {
+  if (invoke_instruction->IsInvokeStaticOrDirect()) {
+    return TryInline(invoke_instruction, resolved_method);
+  }
+
+  // Check if we can statically find the method.
+  ArtMethod* actual_method = FindVirtualOrInterfaceTarget(invoke_instruction, resolved_method);
+  if (actual_method != nullptr) {
+    return TryInline(invoke_instruction, actual_method);
+  }
+
+  // Check if we can use an inline cache.
+  ArtMethod* caller = graph_->GetArtMethod();
+  size_t pointer_size = class_linker->GetImagePointerSize();
+  // Under JIT, we should always know the caller.
+  DCHECK(!Runtime::Current()->UseJit() || (caller != nullptr));
+  if (caller != nullptr && caller->GetProfilingInfo(pointer_size) != nullptr) {
+    ProfilingInfo* profiling_info = caller->GetProfilingInfo(pointer_size);
+    const InlineCache& ic = *profiling_info->GetInlineCache(invoke_instruction->GetDexPc());
+    if (ic.IsUnitialized()) {
       VLOG(compiler) << "Interface or virtual call to "
                      << PrettyMethod(method_index, caller_dex_file)
-                     << " could not be statically determined";
+                     << " is not hit and not inlined";
       return false;
-    }
-    // We have found a method, but we need to find where that method is for the caller's
-    // dex file.
-    method_index = FindMethodIndexIn(resolved_method, caller_dex_file, method_index);
-    if (method_index == DexFile::kDexNoIndex) {
+    } else if (ic.IsMonomorphic()) {
+      MaybeRecordStat(kMonomorphicCall);
+      return TryInlineMonomorphicCall(invoke_instruction, resolved_method, ic);
+    } else if (ic.IsPolymorphic()) {
+      MaybeRecordStat(kPolymorphicCall);
+      return TryInlinePolymorphicCall(invoke_instruction, resolved_method, ic);
+    } else {
+      DCHECK(ic.IsMegamorphic());
       VLOG(compiler) << "Interface or virtual call to "
-                     << PrettyMethod(resolved_method)
-                     << " cannot be inlined because unaccessible to caller";
+                     << PrettyMethod(method_index, caller_dex_file)
+                     << " is megamorphic and not inlined";
+      MaybeRecordStat(kMegamorphicCall);
       return false;
     }
   }

-  bool same_dex_file =
-      IsSameDexFile(*outer_compilation_unit_.GetDexFile(), *resolved_method->GetDexFile());
+  VLOG(compiler) << "Interface or virtual call to "
+                 << PrettyMethod(method_index, caller_dex_file)
+                 << " could not be statically determined";
+  return false;
+}

-  const DexFile::CodeItem* code_item = resolved_method->GetCodeItem();
+bool HInliner::TryInlineMonomorphicCall(HInvoke* invoke_instruction,
+                                        ArtMethod* resolved_method,
+                                        const InlineCache& ic) {
+  const DexFile& caller_dex_file = *caller_compilation_unit_.GetDexFile();
+  uint32_t class_index = FindClassIndexIn(ic.GetMonomorphicType(), caller_dex_file);
+  if (class_index == DexFile::kDexNoIndex) {
+    VLOG(compiler) << "Call to " << PrettyMethod(resolved_method)
+                   << " from inline cache is not inlined because its class is not"
+                   << " accessible to the caller";
+    return false;
+  }
+
+  ClassLinker* class_linker = caller_compilation_unit_.GetClassLinker();
+  size_t pointer_size = class_linker->GetImagePointerSize();
+  if (invoke_instruction->IsInvokeInterface()) {
+    resolved_method = ic.GetMonomorphicType()->FindVirtualMethodForInterface(
+        resolved_method, pointer_size);
+  } else {
+    DCHECK(invoke_instruction->IsInvokeVirtual());
+    resolved_method = ic.GetMonomorphicType()->FindVirtualMethodForVirtual(
+        resolved_method, pointer_size);
+  }
+  DCHECK(resolved_method != nullptr);
+  HInstruction* receiver = invoke_instruction->InputAt(0);
+  HInstruction* cursor = invoke_instruction->GetPrevious();
+  HBasicBlock* bb_cursor = invoke_instruction->GetBlock();
+
+  if (!TryInline(invoke_instruction, resolved_method, /* do_rtp */ false)) {
+    return false;
+  }
+
+  // We successfully inlined, now add a guard.
+  ArtField* field = class_linker->GetClassRoot(ClassLinker::kJavaLangObject)->GetInstanceField(0);
+  DCHECK_EQ(std::string(field->GetName()), "shadow$_klass_");
+  HInstanceFieldGet* field_get = new (graph_->GetArena()) HInstanceFieldGet(
+      receiver,
+      Primitive::kPrimNot,
+      field->GetOffset(),
+      field->IsVolatile(),
+      field->GetDexFieldIndex(),
+      field->GetDeclaringClass()->GetDexClassDefIndex(),
+      *field->GetDexFile(),
+      handles_->NewHandle(field->GetDexCache()),
+      invoke_instruction->GetDexPc());
+
+  bool is_referrer =
+      (ic.GetMonomorphicType() == outermost_graph_->GetArtMethod()->GetDeclaringClass());
+  HLoadClass* load_class = new (graph_->GetArena()) HLoadClass(graph_->GetCurrentMethod(),
+                                                               class_index,
+                                                               caller_dex_file,
+                                                               is_referrer,
+                                                               invoke_instruction->GetDexPc(),
+                                                               /* needs_access_check */ false,
+                                                               /* is_in_dex_cache */ true);
+
+  HNotEqual* compare = new (graph_->GetArena()) HNotEqual(load_class, field_get);
+  HDeoptimize* deoptimize = new (graph_->GetArena()) HDeoptimize(
+      compare, invoke_instruction->GetDexPc());
+  // TODO: Extend reference type propagation to understand the guard.
+  if (cursor != nullptr) {
+    bb_cursor->InsertInstructionAfter(load_class, cursor);
+  } else {
+    bb_cursor->InsertInstructionBefore(load_class, bb_cursor->GetFirstInstruction());
+  }
+  bb_cursor->InsertInstructionAfter(field_get, load_class);
+  bb_cursor->InsertInstructionAfter(compare, field_get);
+  bb_cursor->InsertInstructionAfter(deoptimize, compare);
+  deoptimize->CopyEnvironmentFrom(invoke_instruction->GetEnvironment());
+
+  // Run type propagation to get the guard typed, and eventually propagate the
+  // type of the receiver.
+  ReferenceTypePropagation rtp_fixup(graph_, handles_);
+  rtp_fixup.Run();
+
+  MaybeRecordStat(kInlinedMonomorphicCall);
+  return true;
+}
+
+bool HInliner::TryInlinePolymorphicCall(HInvoke* invoke_instruction ATTRIBUTE_UNUSED,
+                                        ArtMethod* resolved_method,
+                                        const InlineCache& ic ATTRIBUTE_UNUSED) {
+  // TODO
+  VLOG(compiler) << "Unimplemented polymorphic inlining for "
+                 << PrettyMethod(resolved_method);
+  return false;
+}
+
+bool HInliner::TryInline(HInvoke* invoke_instruction, ArtMethod* method, bool do_rtp) {
+  const DexFile& caller_dex_file = *caller_compilation_unit_.GetDexFile();
+  uint32_t method_index = FindMethodIndexIn(
+      method, caller_dex_file, invoke_instruction->GetDexMethodIndex());
+  if (method_index == DexFile::kDexNoIndex) {
+    VLOG(compiler) << "Call to "
+                   << PrettyMethod(method)
+                   << " cannot be inlined because unaccessible to caller";
+    return false;
+  }
+
+  bool same_dex_file = IsSameDexFile(*outer_compilation_unit_.GetDexFile(), *method->GetDexFile());
+
+  const DexFile::CodeItem* code_item = method->GetCodeItem();

   if (code_item == nullptr) {
-    VLOG(compiler) << "Method " << PrettyMethod(method_index, caller_dex_file)
+    VLOG(compiler) << "Method " << PrettyMethod(method)
                    << " is not inlined because it is native";
     return false;
   }

   size_t inline_max_code_units = compiler_driver_->GetCompilerOptions().GetInlineMaxCodeUnits();
   if (code_item->insns_size_in_code_units_ > inline_max_code_units) {
-    VLOG(compiler) << "Method " << PrettyMethod(method_index, caller_dex_file)
+    VLOG(compiler) << "Method " << PrettyMethod(method)
                    << " is too big to inline";
     return false;
   }

   if (code_item->tries_size_ != 0) {
-    VLOG(compiler) << "Method " << PrettyMethod(method_index, caller_dex_file)
+    VLOG(compiler) << "Method " << PrettyMethod(method)
                    << " is not inlined because of try block";
     return false;
   }

-  if (!resolved_method->GetDeclaringClass()->IsVerified()) {
-    uint16_t class_def_idx = resolved_method->GetDeclaringClass()->GetDexClassDefIndex();
+  if (!method->GetDeclaringClass()->IsVerified()) {
+    uint16_t class_def_idx = method->GetDeclaringClass()->GetDexClassDefIndex();
     if (!compiler_driver_->IsMethodVerifiedWithoutFailures(
-            resolved_method->GetDexMethodIndex(), class_def_idx, *resolved_method->GetDexFile())) {
+            method->GetDexMethodIndex(), class_def_idx, *method->GetDexFile())) {
       VLOG(compiler) << "Method " << PrettyMethod(method_index, caller_dex_file)
                      << " couldn't be verified, so it cannot be inlined";
       return false;
@@ -277,7 +424,7 @@ bool HInliner::TryInline(HInvoke* invoke_instruction) {
     return false;
   }

-  if (!TryBuildAndInline(resolved_method, invoke_instruction, same_dex_file)) {
+  if (!TryBuildAndInline(method, invoke_instruction, same_dex_file, do_rtp)) {
     return false;
   }

@@ -288,7 +435,8 @@

 bool HInliner::TryBuildAndInline(ArtMethod* resolved_method,
                                  HInvoke* invoke_instruction,
-                                 bool same_dex_file) {
+                                 bool same_dex_file,
+                                 bool do_rtp) {
   ScopedObjectAccess soa(Thread::Current());
   const DexFile::CodeItem* code_item = resolved_method->GetCodeItem();
   const DexFile& callee_dex_file = *resolved_method->GetDexFile();
@@ -341,6 +489,7 @@ bool HInliner::TryBuildAndInline(ArtMethod* resolved_method,
       invoke_type,
       graph_->IsDebuggable(),
       graph_->GetCurrentInstructionId());
+  callee_graph->SetArtMethod(resolved_method);

   OptimizingCompilerStats inline_stats;
   HGraphBuilder builder(callee_graph,
@@ -422,6 +571,7 @@ bool HInliner::TryBuildAndInline(ArtMethod* resolved_method,
     size_t number_of_instructions_budget = kMaximumNumberOfHInstructions;
     if (depth_ + 1 < compiler_driver_->GetCompilerOptions().GetInlineDepthLimit()) {
       HInliner inliner(callee_graph,
+                       outermost_graph_,
                        codegen_,
                        outer_compilation_unit_,
                        dex_compilation_unit,
@@ -533,9 +683,9 @@ bool HInliner::TryBuildAndInline(ArtMethod* resolved_method,
   HNullConstant* null_constant = graph_->GetNullConstant();
   if (!null_constant->GetReferenceTypeInfo().IsValid()) {
     ReferenceTypeInfo::TypeHandle obj_handle =
-      handles_->NewHandle(class_linker->GetClassRoot(ClassLinker::kJavaLangObject));
+        handles_->NewHandle(class_linker->GetClassRoot(ClassLinker::kJavaLangObject));
     null_constant->SetReferenceTypeInfo(
-      ReferenceTypeInfo::Create(obj_handle, false /* is_exact */));
+        ReferenceTypeInfo::Create(obj_handle, false /* is_exact */));
   }

   // Check the integrity of reference types and run another type propagation if needed.
@@ -554,14 +704,16 @@ bool HInliner::TryBuildAndInline(ArtMethod* resolved_method,
           return_handle, return_handle->CannotBeAssignedFromOtherTypes() /* is_exact */));
     }

-    // If the return type is a refinement of the declared type run the type propagation again.
-    ReferenceTypeInfo return_rti = return_replacement->GetReferenceTypeInfo();
-    ReferenceTypeInfo invoke_rti = invoke_instruction->GetReferenceTypeInfo();
-    if (invoke_rti.IsStrictSupertypeOf(return_rti)
-        || (return_rti.IsExact() && !invoke_rti.IsExact())
-        || !return_replacement->CanBeNull()) {
-      ReferenceTypePropagation rtp_fixup(graph_, handles_);
-      rtp_fixup.Run();
+    if (do_rtp) {
+      // If the return type is a refinement of the declared type run the type propagation again.
+      ReferenceTypeInfo return_rti = return_replacement->GetReferenceTypeInfo();
+      ReferenceTypeInfo invoke_rti = invoke_instruction->GetReferenceTypeInfo();
+      if (invoke_rti.IsStrictSupertypeOf(return_rti)
+          || (return_rti.IsExact() && !invoke_rti.IsExact())
+          || !return_replacement->CanBeNull()) {
+        ReferenceTypePropagation rtp_fixup(graph_, handles_);
+        rtp_fixup.Run();
+      }
     }
   }
 }
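TryInlineMonomorphicCall() above wires four new HIR nodes in front of the inlined body: a field get of the receiver's class pointer (the shadow$_klass_ field), an HLoadClass of the type recorded in the inline cache, an HNotEqual comparing the two, and an HDeoptimize guarding the speculation. In pseudocode (these are HIR nodes built by the patch, not literal C++), the transformed call site behaves like:

    // Guard emitted before the inlined body.
    receiver_class = receiver.shadow$_klass_;   // HInstanceFieldGet
    cached_class   = LoadClass(class_index);    // HLoadClass (is_in_dex_cache = true)
    if (receiver_class != cached_class) {       // HNotEqual
      Deoptimize();                             // HDeoptimize: bail out if speculation fails
    }
    // ... inlined body, valid only for cached_class ...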
diff --git a/compiler/optimizing/inliner.h b/compiler/optimizing/inliner.h
index 0f6a9453be..7b9fb73ccf 100644
--- a/compiler/optimizing/inliner.h
+++ b/compiler/optimizing/inliner.h
@@ -27,11 +27,13 @@ class CompilerDriver;
 class DexCompilationUnit;
 class HGraph;
 class HInvoke;
+class InlineCache;
 class OptimizingCompilerStats;

 class HInliner : public HOptimization {
  public:
   HInliner(HGraph* outer_graph,
+           HGraph* outermost_graph,
            CodeGenerator* codegen,
            const DexCompilationUnit& outer_compilation_unit,
            const DexCompilationUnit& caller_compilation_unit,
@@ -40,6 +42,7 @@ class HInliner : public HOptimization {
            OptimizingCompilerStats* stats,
            size_t depth = 0)
       : HOptimization(outer_graph, kInlinerPassName, stats),
+        outermost_graph_(outermost_graph),
         outer_compilation_unit_(outer_compilation_unit),
         caller_compilation_unit_(caller_compilation_unit),
         codegen_(codegen),
@@ -54,10 +57,33 @@ class HInliner : public HOptimization {

  private:
   bool TryInline(HInvoke* invoke_instruction);
+
+  // Try to inline `resolved_method` in place of `invoke_instruction`. `do_rtp` is whether
+  // reference type propagation can run after the inlining.
+  bool TryInline(HInvoke* invoke_instruction, ArtMethod* resolved_method, bool do_rtp = true)
+    SHARED_REQUIRES(Locks::mutator_lock_);
+
+  // Try to inline the target of a monomorphic call. If successful, the code
+  // in the graph will look like:
+  // if (receiver.getClass() != ic.GetMonomorphicType()) deopt
+  // ... // inlined code
+  bool TryInlineMonomorphicCall(HInvoke* invoke_instruction,
+                                ArtMethod* resolved_method,
+                                const InlineCache& ic)
+    SHARED_REQUIRES(Locks::mutator_lock_);
+
+  // Try to inline targets of a polymorphic call. Currently unimplemented.
+  bool TryInlinePolymorphicCall(HInvoke* invoke_instruction,
+                                ArtMethod* resolved_method,
+                                const InlineCache& ic)
+    SHARED_REQUIRES(Locks::mutator_lock_);
+
   bool TryBuildAndInline(ArtMethod* resolved_method,
                          HInvoke* invoke_instruction,
-                         bool same_dex_file);
+                         bool same_dex_file,
+                         bool do_rtp = true);

+  HGraph* const outermost_graph_;
   const DexCompilationUnit& outer_compilation_unit_;
   const DexCompilationUnit& caller_compilation_unit_;
   CodeGenerator* const codegen_;
diff --git a/compiler/optimizing/intrinsics_arm.cc b/compiler/optimizing/intrinsics_arm.cc
index d2017da221..5329b5c1b7 100644
--- a/compiler/optimizing/intrinsics_arm.cc
+++ b/compiler/optimizing/intrinsics_arm.cc
@@ -113,10 +113,10 @@ void IntrinsicLocationsBuilderARM::VisitDoubleLongBitsToDouble(HInvoke* invoke)
 }

 void IntrinsicCodeGeneratorARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
-  MoveFPToInt(invoke->GetLocations(), true, GetAssembler());
+  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
 }
 void IntrinsicCodeGeneratorARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
-  MoveIntToFP(invoke->GetLocations(), true, GetAssembler());
+  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
 }

 void IntrinsicLocationsBuilderARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
@@ -127,10 +127,10 @@ void IntrinsicLocationsBuilderARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
 }

 void IntrinsicCodeGeneratorARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
-  MoveFPToInt(invoke->GetLocations(), false, GetAssembler());
+  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
 }
 void IntrinsicCodeGeneratorARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
-  MoveIntToFP(invoke->GetLocations(), false, GetAssembler());
+  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
 }

 static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
@@ -358,7 +358,7 @@ void IntrinsicLocationsBuilderARM::VisitIntegerRotateRight(HInvoke* invoke) {
 }

 void IntrinsicCodeGeneratorARM::VisitIntegerRotateRight(HInvoke* invoke) {
-  GenIntegerRotate(invoke->GetLocations(), GetAssembler(), false /* is_left */);
+  GenIntegerRotate(invoke->GetLocations(), GetAssembler(), /* is_left */ false);
 }

 void IntrinsicLocationsBuilderARM::VisitLongRotateRight(HInvoke* invoke) {
@@ -377,7 +377,7 @@ void IntrinsicLocationsBuilderARM::VisitLongRotateRight(HInvoke* invoke) {
 }

 void IntrinsicCodeGeneratorARM::VisitLongRotateRight(HInvoke* invoke) {
-  GenLongRotate(invoke->GetLocations(), GetAssembler(), false /* is_left */);
+  GenLongRotate(invoke->GetLocations(), GetAssembler(), /* is_left */ false);
 }

 void IntrinsicLocationsBuilderARM::VisitIntegerRotateLeft(HInvoke* invoke) {
@@ -390,7 +390,7 @@ void IntrinsicLocationsBuilderARM::VisitIntegerRotateLeft(HInvoke* invoke) {
 }

 void IntrinsicCodeGeneratorARM::VisitIntegerRotateLeft(HInvoke* invoke) {
-  GenIntegerRotate(invoke->GetLocations(), GetAssembler(), true /* is_left */);
+  GenIntegerRotate(invoke->GetLocations(), GetAssembler(), /* is_left */ true);
 }

 void IntrinsicLocationsBuilderARM::VisitLongRotateLeft(HInvoke* invoke) {
@@ -409,7 +409,7 @@ void IntrinsicLocationsBuilderARM::VisitLongRotateLeft(HInvoke* invoke) {
 }

 void IntrinsicCodeGeneratorARM::VisitLongRotateLeft(HInvoke* invoke) {
-  GenLongRotate(invoke->GetLocations(), GetAssembler(), true /* is_left */);
+  GenLongRotate(invoke->GetLocations(), GetAssembler(), /* is_left */ true);
 }

 static void MathAbsFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
@@ -429,7 +429,7 @@ void IntrinsicLocationsBuilderARM::VisitMathAbsDouble(HInvoke* invoke) {
 }

 void IntrinsicCodeGeneratorARM::VisitMathAbsDouble(HInvoke* invoke) {
-  MathAbsFP(invoke->GetLocations(), true, GetAssembler());
+  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
 }

 void IntrinsicLocationsBuilderARM::VisitMathAbsFloat(HInvoke* invoke) {
@@ -437,7 +437,7 @@ void IntrinsicLocationsBuilderARM::VisitMathAbsFloat(HInvoke* invoke) {
 }

 void IntrinsicCodeGeneratorARM::VisitMathAbsFloat(HInvoke* invoke) {
-  MathAbsFP(invoke->GetLocations(), false, GetAssembler());
+  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
 }

 static void CreateIntToIntPlusTemp(ArenaAllocator* arena, HInvoke* invoke) {
@@ -486,7 +486,7 @@ void IntrinsicLocationsBuilderARM::VisitMathAbsInt(HInvoke* invoke) {
 }

 void IntrinsicCodeGeneratorARM::VisitMathAbsInt(HInvoke* invoke) {
-  GenAbsInteger(invoke->GetLocations(), false, GetAssembler());
+  GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
 }

@@ -495,7 +495,7 @@ void IntrinsicLocationsBuilderARM::VisitMathAbsLong(HInvoke* invoke) {
 }

 void IntrinsicCodeGeneratorARM::VisitMathAbsLong(HInvoke* invoke) {
-  GenAbsInteger(invoke->GetLocations(), true, GetAssembler());
+  GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
 }

 static void GenMinMax(LocationSummary* locations,
@@ -526,7 +526,7 @@ void IntrinsicLocationsBuilderARM::VisitMathMinIntInt(HInvoke* invoke) {
 }

 void IntrinsicCodeGeneratorARM::VisitMathMinIntInt(HInvoke* invoke) {
-  GenMinMax(invoke->GetLocations(), true, GetAssembler());
+  GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler());
 }

 void IntrinsicLocationsBuilderARM::VisitMathMaxIntInt(HInvoke* invoke) {
@@ -534,7 +534,7 @@ void IntrinsicLocationsBuilderARM::VisitMathMaxIntInt(HInvoke* invoke) {
 }

 void IntrinsicCodeGeneratorARM::VisitMathMaxIntInt(HInvoke* invoke) {
-  GenMinMax(invoke->GetLocations(), false, GetAssembler());
+  GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler());
 }

 void IntrinsicLocationsBuilderARM::VisitMathSqrt(HInvoke* invoke) {
@@ -742,22 +742,22 @@ void IntrinsicLocationsBuilderARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke)
 }

 void IntrinsicCodeGeneratorARM::VisitUnsafeGet(HInvoke* invoke) {
-  GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_);
+  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
 }
 void IntrinsicCodeGeneratorARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
-  GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_);
+  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
 }
 void IntrinsicCodeGeneratorARM::VisitUnsafeGetLong(HInvoke* invoke) {
-  GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_);
+  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
 }
 void IntrinsicCodeGeneratorARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
-  GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_);
+  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
 }
 void IntrinsicCodeGeneratorARM::VisitUnsafeGetObject(HInvoke* invoke) {
-  GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_);
+  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
 }
 void IntrinsicCodeGeneratorARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
-  GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_);
+  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile
*/ true, codegen_); } static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, @@ -787,31 +787,34 @@ static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, } void IntrinsicLocationsBuilderARM::VisitUnsafePut(HInvoke* invoke) { - CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, false, invoke); + CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ false, invoke); } void IntrinsicLocationsBuilderARM::VisitUnsafePutOrdered(HInvoke* invoke) { - CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, false, invoke); + CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ false, invoke); } void IntrinsicLocationsBuilderARM::VisitUnsafePutVolatile(HInvoke* invoke) { - CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, true, invoke); + CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ true, invoke); } void IntrinsicLocationsBuilderARM::VisitUnsafePutObject(HInvoke* invoke) { - CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, false, invoke); + CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ false, invoke); } void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) { - CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, false, invoke); + CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ false, invoke); } void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) { - CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, true, invoke); + CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ true, invoke); } void IntrinsicLocationsBuilderARM::VisitUnsafePutLong(HInvoke* invoke) { - CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, false, invoke); + CreateIntIntIntIntToVoid( + arena_, features_, Primitive::kPrimLong, /* is_volatile */ false, invoke); } void IntrinsicLocationsBuilderARM::VisitUnsafePutLongOrdered(HInvoke* invoke) { - CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, false, invoke); + CreateIntIntIntIntToVoid( + arena_, features_, Primitive::kPrimLong, /* is_volatile */ false, invoke); } void IntrinsicLocationsBuilderARM::VisitUnsafePutLongVolatile(HInvoke* invoke) { - CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, true, invoke); + CreateIntIntIntIntToVoid( + arena_, features_, Primitive::kPrimLong, /* is_volatile */ true, invoke); } static void GenUnsafePut(LocationSummary* locations, @@ -873,31 +876,67 @@ static void GenUnsafePut(LocationSummary* locations, } void IntrinsicCodeGeneratorARM::VisitUnsafePut(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimInt, + /* is_volatile */ false, + /* is_ordered */ false, + codegen_); } void IntrinsicCodeGeneratorARM::VisitUnsafePutOrdered(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, true, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimInt, + /* is_volatile */ false, + /* is_ordered */ true, + codegen_); } void IntrinsicCodeGeneratorARM::VisitUnsafePutVolatile(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimInt, + /* is_volatile */ true, + /* is_ordered */ false, + codegen_); } void 
IntrinsicCodeGeneratorARM::VisitUnsafePutObject(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimNot, + /* is_volatile */ false, + /* is_ordered */ false, + codegen_); } void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, true, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimNot, + /* is_volatile */ false, + /* is_ordered */ true, + codegen_); } void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimNot, + /* is_volatile */ true, + /* is_ordered */ false, + codegen_); } void IntrinsicCodeGeneratorARM::VisitUnsafePutLong(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimLong, + /* is_volatile */ false, + /* is_ordered */ false, + codegen_); } void IntrinsicCodeGeneratorARM::VisitUnsafePutLongOrdered(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, true, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimLong, + /* is_volatile */ false, + /* is_ordered */ true, + codegen_); } void IntrinsicCodeGeneratorARM::VisitUnsafePutLongVolatile(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimLong, + /* is_volatile */ true, + /* is_ordered */ false, + codegen_); } static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* arena, @@ -1245,7 +1284,8 @@ void IntrinsicLocationsBuilderARM::VisitStringIndexOf(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM::VisitStringIndexOf(HInvoke* invoke) { - GenerateVisitStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), true); + GenerateVisitStringIndexOf( + invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true); } void IntrinsicLocationsBuilderARM::VisitStringIndexOfAfter(HInvoke* invoke) { @@ -1265,7 +1305,8 @@ void IntrinsicLocationsBuilderARM::VisitStringIndexOfAfter(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM::VisitStringIndexOfAfter(HInvoke* invoke) { - GenerateVisitStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), false); + GenerateVisitStringIndexOf( + invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false); } void IntrinsicLocationsBuilderARM::VisitStringNewStringFromBytes(HInvoke* invoke) { @@ -1644,7 +1685,7 @@ void IntrinsicCodeGeneratorARM::VisitSystemArrayCopy(HInvoke* invoke) { temp2, dest, Register(kNoRegister), - false); + /* can_be_null */ false); __ Bind(slow_path->GetExitLabel()); } diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc index b04dcceb05..962c4d5167 100644 --- a/compiler/optimizing/intrinsics_arm64.cc +++ b/compiler/optimizing/intrinsics_arm64.cc @@ -202,10 +202,10 @@ void IntrinsicLocationsBuilderARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke } void IntrinsicCodeGeneratorARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) { - MoveFPToInt(invoke->GetLocations(), true, GetVIXLAssembler()); + MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler()); } void IntrinsicCodeGeneratorARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) 
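// (These Double/Float bit-cast intrinsics reinterpret the value without
// changing any bits; on ARM64 the MoveFPToInt and MoveIntToFP helpers
// reduce to a single FMOV between the general and FP register files, with
// no round-trip through memory.)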
{ - MoveIntToFP(invoke->GetLocations(), true, GetVIXLAssembler()); + MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler()); } void IntrinsicLocationsBuilderARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) { @@ -216,10 +216,10 @@ void IntrinsicLocationsBuilderARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) { - MoveFPToInt(invoke->GetLocations(), false, GetVIXLAssembler()); + MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler()); } void IntrinsicCodeGeneratorARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) { - MoveIntToFP(invoke->GetLocations(), false, GetVIXLAssembler()); + MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler()); } static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) { @@ -477,7 +477,7 @@ void IntrinsicLocationsBuilderARM64::VisitMathAbsDouble(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitMathAbsDouble(HInvoke* invoke) { - MathAbsFP(invoke->GetLocations(), true, GetVIXLAssembler()); + MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler()); } void IntrinsicLocationsBuilderARM64::VisitMathAbsFloat(HInvoke* invoke) { @@ -485,7 +485,7 @@ void IntrinsicLocationsBuilderARM64::VisitMathAbsFloat(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitMathAbsFloat(HInvoke* invoke) { - MathAbsFP(invoke->GetLocations(), false, GetVIXLAssembler()); + MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler()); } static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) { @@ -514,7 +514,7 @@ void IntrinsicLocationsBuilderARM64::VisitMathAbsInt(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitMathAbsInt(HInvoke* invoke) { - GenAbsInteger(invoke->GetLocations(), false, GetVIXLAssembler()); + GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler()); } void IntrinsicLocationsBuilderARM64::VisitMathAbsLong(HInvoke* invoke) { @@ -522,7 +522,7 @@ void IntrinsicLocationsBuilderARM64::VisitMathAbsLong(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitMathAbsLong(HInvoke* invoke) { - GenAbsInteger(invoke->GetLocations(), true, GetVIXLAssembler()); + GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler()); } static void GenMinMaxFP(LocationSummary* locations, @@ -557,7 +557,7 @@ void IntrinsicLocationsBuilderARM64::VisitMathMinDoubleDouble(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitMathMinDoubleDouble(HInvoke* invoke) { - GenMinMaxFP(invoke->GetLocations(), true, true, GetVIXLAssembler()); + GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ true, GetVIXLAssembler()); } void IntrinsicLocationsBuilderARM64::VisitMathMinFloatFloat(HInvoke* invoke) { @@ -565,7 +565,7 @@ void IntrinsicLocationsBuilderARM64::VisitMathMinFloatFloat(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitMathMinFloatFloat(HInvoke* invoke) { - GenMinMaxFP(invoke->GetLocations(), true, false, GetVIXLAssembler()); + GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ false, GetVIXLAssembler()); } void IntrinsicLocationsBuilderARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) { @@ -573,7 +573,7 @@ void IntrinsicLocationsBuilderARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) { - GenMinMaxFP(invoke->GetLocations(), false, true, GetVIXLAssembler()); + GenMinMaxFP(invoke->GetLocations(), /* 
is_min */ false, /* is_double */ true, GetVIXLAssembler()); } void IntrinsicLocationsBuilderARM64::VisitMathMaxFloatFloat(HInvoke* invoke) { @@ -581,7 +581,8 @@ void IntrinsicLocationsBuilderARM64::VisitMathMaxFloatFloat(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitMathMaxFloatFloat(HInvoke* invoke) { - GenMinMaxFP(invoke->GetLocations(), false, false, GetVIXLAssembler()); + GenMinMaxFP( + invoke->GetLocations(), /* is_min */ false, /* is_double */ false, GetVIXLAssembler()); } static void GenMinMax(LocationSummary* locations, @@ -614,7 +615,7 @@ void IntrinsicLocationsBuilderARM64::VisitMathMinIntInt(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitMathMinIntInt(HInvoke* invoke) { - GenMinMax(invoke->GetLocations(), true, false, GetVIXLAssembler()); + GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ false, GetVIXLAssembler()); } void IntrinsicLocationsBuilderARM64::VisitMathMinLongLong(HInvoke* invoke) { @@ -622,7 +623,7 @@ void IntrinsicLocationsBuilderARM64::VisitMathMinLongLong(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitMathMinLongLong(HInvoke* invoke) { - GenMinMax(invoke->GetLocations(), true, true, GetVIXLAssembler()); + GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ true, GetVIXLAssembler()); } void IntrinsicLocationsBuilderARM64::VisitMathMaxIntInt(HInvoke* invoke) { @@ -630,7 +631,7 @@ void IntrinsicLocationsBuilderARM64::VisitMathMaxIntInt(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitMathMaxIntInt(HInvoke* invoke) { - GenMinMax(invoke->GetLocations(), false, false, GetVIXLAssembler()); + GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ false, GetVIXLAssembler()); } void IntrinsicLocationsBuilderARM64::VisitMathMaxLongLong(HInvoke* invoke) { @@ -638,7 +639,7 @@ void IntrinsicLocationsBuilderARM64::VisitMathMaxLongLong(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitMathMaxLongLong(HInvoke* invoke) { - GenMinMax(invoke->GetLocations(), false, true, GetVIXLAssembler()); + GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ true, GetVIXLAssembler()); } void IntrinsicLocationsBuilderARM64::VisitMathSqrt(HInvoke* invoke) { @@ -714,7 +715,7 @@ void IntrinsicLocationsBuilderARM64::VisitMathRoundDouble(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitMathRoundDouble(HInvoke* invoke) { - GenMathRound(invoke->GetLocations(), true, GetVIXLAssembler()); + GenMathRound(invoke->GetLocations(), /* is_double */ true, GetVIXLAssembler()); } void IntrinsicLocationsBuilderARM64::VisitMathRoundFloat(HInvoke* invoke) { @@ -722,7 +723,7 @@ void IntrinsicLocationsBuilderARM64::VisitMathRoundFloat(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitMathRoundFloat(HInvoke* invoke) { - GenMathRound(invoke->GetLocations(), false, GetVIXLAssembler()); + GenMathRound(invoke->GetLocations(), /* is_double */ false, GetVIXLAssembler()); } void IntrinsicLocationsBuilderARM64::VisitMemoryPeekByte(HInvoke* invoke) { @@ -895,22 +896,22 @@ void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObjectVolatile(HInvoke* invok } void IntrinsicCodeGeneratorARM64::VisitUnsafeGet(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_); } void IntrinsicCodeGeneratorARM64::VisitUnsafeGetVolatile(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_); } void 
IntrinsicCodeGeneratorARM64::VisitUnsafeGetLong(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_); } void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_); } void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObject(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_); } void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_); } static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) { @@ -1001,31 +1002,67 @@ static void GenUnsafePut(LocationSummary* locations, } void IntrinsicCodeGeneratorARM64::VisitUnsafePut(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimInt, + /* is_volatile */ false, + /* is_ordered */ false, + codegen_); } void IntrinsicCodeGeneratorARM64::VisitUnsafePutOrdered(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, true, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimInt, + /* is_volatile */ false, + /* is_ordered */ true, + codegen_); } void IntrinsicCodeGeneratorARM64::VisitUnsafePutVolatile(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimInt, + /* is_volatile */ true, + /* is_ordered */ false, + codegen_); } void IntrinsicCodeGeneratorARM64::VisitUnsafePutObject(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimNot, + /* is_volatile */ false, + /* is_ordered */ false, + codegen_); } void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, true, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimNot, + /* is_volatile */ false, + /* is_ordered */ true, + codegen_); } void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimNot, + /* is_volatile */ true, + /* is_ordered */ false, + codegen_); } void IntrinsicCodeGeneratorARM64::VisitUnsafePutLong(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimLong, + /* is_volatile */ false, + /* is_ordered */ false, + codegen_); } void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, true, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimLong, + /* is_volatile */ false, + /* is_ordered */ true, + codegen_); } void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, false, codegen_); + 
GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimLong, + /* is_volatile */ true, + /* is_ordered */ false, + codegen_); } static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, HInvoke* invoke) { @@ -1379,7 +1416,8 @@ void IntrinsicLocationsBuilderARM64::VisitStringIndexOf(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitStringIndexOf(HInvoke* invoke) { - GenerateVisitStringIndexOf(invoke, GetVIXLAssembler(), codegen_, GetAllocator(), true); + GenerateVisitStringIndexOf( + invoke, GetVIXLAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true); } void IntrinsicLocationsBuilderARM64::VisitStringIndexOfAfter(HInvoke* invoke) { @@ -1399,7 +1437,8 @@ void IntrinsicLocationsBuilderARM64::VisitStringIndexOfAfter(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitStringIndexOfAfter(HInvoke* invoke) { - GenerateVisitStringIndexOf(invoke, GetVIXLAssembler(), codegen_, GetAllocator(), false); + GenerateVisitStringIndexOf( + invoke, GetVIXLAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false); } void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromBytes(HInvoke* invoke) { diff --git a/compiler/optimizing/intrinsics_mips.cc b/compiler/optimizing/intrinsics_mips.cc index 326844526e..9ecce0e93a 100644 --- a/compiler/optimizing/intrinsics_mips.cc +++ b/compiler/optimizing/intrinsics_mips.cc @@ -43,6 +43,14 @@ ArenaAllocator* IntrinsicCodeGeneratorMIPS::GetAllocator() { return codegen_->GetGraph()->GetArena(); } +inline bool IntrinsicCodeGeneratorMIPS::IsR2OrNewer() { + return codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2(); +} + +inline bool IntrinsicCodeGeneratorMIPS::IsR6() { + return codegen_->GetInstructionSetFeatures().IsR6(); +} + #define __ codegen->GetAssembler()-> static void MoveFromReturnRegister(Location trg, @@ -168,7 +176,7 @@ void IntrinsicLocationsBuilderMIPS::VisitDoubleDoubleToRawLongBits(HInvoke* invo } void IntrinsicCodeGeneratorMIPS::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) { - MoveFPToInt(invoke->GetLocations(), true, GetAssembler()); + MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); } // int java.lang.Float.floatToRawIntBits(float) @@ -177,7 +185,7 @@ void IntrinsicLocationsBuilderMIPS::VisitFloatFloatToRawIntBits(HInvoke* invoke) } void IntrinsicCodeGeneratorMIPS::VisitFloatFloatToRawIntBits(HInvoke* invoke) { - MoveFPToInt(invoke->GetLocations(), false, GetAssembler()); + MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); } static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) { @@ -210,7 +218,7 @@ void IntrinsicLocationsBuilderMIPS::VisitDoubleLongBitsToDouble(HInvoke* invoke) } void IntrinsicCodeGeneratorMIPS::VisitDoubleLongBitsToDouble(HInvoke* invoke) { - MoveIntToFP(invoke->GetLocations(), true, GetAssembler()); + MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); } // float java.lang.Float.intBitsToFloat(int) @@ -219,24 +227,29 @@ void IntrinsicLocationsBuilderMIPS::VisitFloatIntBitsToFloat(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS::VisitFloatIntBitsToFloat(HInvoke* invoke) { - MoveIntToFP(invoke->GetLocations(), false, GetAssembler()); + MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); } -static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) { +static void CreateIntToIntLocations(ArenaAllocator* arena, + HInvoke* invoke, + Location::OutputOverlap overlaps = Location::kNoOutputOverlap) { LocationSummary* locations = new (arena) 
LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified); locations->SetInAt(0, Location::RequiresRegister()); - locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); + locations->SetOut(Location::RequiresRegister(), overlaps); } -static void GenReverseBytes(LocationSummary* locations, - Primitive::Type type, - MipsAssembler* assembler, - bool isR2OrNewer) { +static void GenReverse(LocationSummary* locations, + Primitive::Type type, + bool isR2OrNewer, + bool isR6, + bool reverseBits, + MipsAssembler* assembler) { DCHECK(type == Primitive::kPrimShort || type == Primitive::kPrimInt || type == Primitive::kPrimLong); + DCHECK(type != Primitive::kPrimShort || !reverseBits); if (type == Primitive::kPrimShort) { Register in = locations->InAt(0).AsRegister<Register>(); @@ -273,6 +286,30 @@ static void GenReverseBytes(LocationSummary* locations, __ And(out, out, AT); __ Or(out, out, TMP); } + if (reverseBits) { + if (isR6) { + __ Bitswap(out, out); + } else { + __ LoadConst32(AT, 0x0F0F0F0F); + __ And(TMP, out, AT); + __ Sll(TMP, TMP, 4); + __ Srl(out, out, 4); + __ And(out, out, AT); + __ Or(out, TMP, out); + __ LoadConst32(AT, 0x33333333); + __ And(TMP, out, AT); + __ Sll(TMP, TMP, 2); + __ Srl(out, out, 2); + __ And(out, out, AT); + __ Or(out, TMP, out); + __ LoadConst32(AT, 0x55555555); + __ And(TMP, out, AT); + __ Sll(TMP, TMP, 1); + __ Srl(out, out, 1); + __ And(out, out, AT); + __ Or(out, TMP, out); + } + } } else if (type == Primitive::kPrimLong) { Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>(); Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>(); @@ -314,6 +351,46 @@ static void GenReverseBytes(LocationSummary* locations, __ And(out_lo, out_lo, AT); __ Or(out_lo, out_lo, TMP); } + if (reverseBits) { + if (isR6) { + __ Bitswap(out_hi, out_hi); + __ Bitswap(out_lo, out_lo); + } else { + __ LoadConst32(AT, 0x0F0F0F0F); + __ And(TMP, out_hi, AT); + __ Sll(TMP, TMP, 4); + __ Srl(out_hi, out_hi, 4); + __ And(out_hi, out_hi, AT); + __ Or(out_hi, TMP, out_hi); + __ And(TMP, out_lo, AT); + __ Sll(TMP, TMP, 4); + __ Srl(out_lo, out_lo, 4); + __ And(out_lo, out_lo, AT); + __ Or(out_lo, TMP, out_lo); + __ LoadConst32(AT, 0x33333333); + __ And(TMP, out_hi, AT); + __ Sll(TMP, TMP, 2); + __ Srl(out_hi, out_hi, 2); + __ And(out_hi, out_hi, AT); + __ Or(out_hi, TMP, out_hi); + __ And(TMP, out_lo, AT); + __ Sll(TMP, TMP, 2); + __ Srl(out_lo, out_lo, 2); + __ And(out_lo, out_lo, AT); + __ Or(out_lo, TMP, out_lo); + __ LoadConst32(AT, 0x55555555); + __ And(TMP, out_hi, AT); + __ Sll(TMP, TMP, 1); + __ Srl(out_hi, out_hi, 1); + __ And(out_hi, out_hi, AT); + __ Or(out_hi, TMP, out_hi); + __ And(TMP, out_lo, AT); + __ Sll(TMP, TMP, 1); + __ Srl(out_lo, out_lo, 1); + __ And(out_lo, out_lo, AT); + __ Or(out_lo, TMP, out_lo); + } + } } } @@ -323,10 +400,12 @@ void IntrinsicLocationsBuilderMIPS::VisitIntegerReverseBytes(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS::VisitIntegerReverseBytes(HInvoke* invoke) { - GenReverseBytes(invoke->GetLocations(), - Primitive::kPrimInt, - GetAssembler(), - codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2()); + GenReverse(invoke->GetLocations(), + Primitive::kPrimInt, + IsR2OrNewer(), + IsR6(), + false, + GetAssembler()); } // long java.lang.Long.reverseBytes(long) @@ -335,10 +414,12 @@ void IntrinsicLocationsBuilderMIPS::VisitLongReverseBytes(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS::VisitLongReverseBytes(HInvoke* invoke) { - GenReverseBytes(invoke->GetLocations(), - 
Primitive::kPrimLong, - GetAssembler(), - codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2()); + GenReverse(invoke->GetLocations(), + Primitive::kPrimLong, + IsR2OrNewer(), + IsR6(), + false, + GetAssembler()); } // short java.lang.Short.reverseBytes(short) @@ -347,10 +428,397 @@ void IntrinsicLocationsBuilderMIPS::VisitShortReverseBytes(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS::VisitShortReverseBytes(HInvoke* invoke) { - GenReverseBytes(invoke->GetLocations(), - Primitive::kPrimShort, - GetAssembler(), - codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2()); + GenReverse(invoke->GetLocations(), + Primitive::kPrimShort, + IsR2OrNewer(), + IsR6(), + false, + GetAssembler()); +} + +static void GenNumberOfLeadingZeroes(LocationSummary* locations, + bool is64bit, + bool isR6, + MipsAssembler* assembler) { + Register out = locations->Out().AsRegister<Register>(); + if (is64bit) { + Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>(); + Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>(); + + if (isR6) { + __ ClzR6(AT, in_hi); + __ ClzR6(TMP, in_lo); + __ Seleqz(TMP, TMP, in_hi); + } else { + __ ClzR2(AT, in_hi); + __ ClzR2(TMP, in_lo); + __ Movn(TMP, ZERO, in_hi); + } + __ Addu(out, AT, TMP); + } else { + Register in = locations->InAt(0).AsRegister<Register>(); + + if (isR6) { + __ ClzR6(out, in); + } else { + __ ClzR2(out, in); + } + } +} + +// int java.lang.Integer.numberOfLeadingZeros(int i) +void IntrinsicLocationsBuilderMIPS::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) { + CreateIntToIntLocations(arena_, invoke); +} + +void IntrinsicCodeGeneratorMIPS::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) { + GenNumberOfLeadingZeroes(invoke->GetLocations(), false, IsR6(), GetAssembler()); +} + +// int java.lang.Long.numberOfLeadingZeros(long i) +void IntrinsicLocationsBuilderMIPS::VisitLongNumberOfLeadingZeros(HInvoke* invoke) { + CreateIntToIntLocations(arena_, invoke); +} + +void IntrinsicCodeGeneratorMIPS::VisitLongNumberOfLeadingZeros(HInvoke* invoke) { + GenNumberOfLeadingZeroes(invoke->GetLocations(), true, IsR6(), GetAssembler()); +} + +static void GenNumberOfTrailingZeroes(LocationSummary* locations, + bool is64bit, + bool isR6, + bool isR2OrNewer, + MipsAssembler* assembler) { + Register out = locations->Out().AsRegister<Register>(); + Register in_lo; + Register in; + + if (is64bit) { + MipsLabel done; + Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>(); + + in_lo = locations->InAt(0).AsRegisterPairLow<Register>(); + + // If in_lo is zero then count the number of trailing zeroes in in_hi; + // otherwise count the number of trailing zeroes in in_lo. + // out = in_lo ? in_lo : in_hi; + if (isR6) { + __ Seleqz(out, in_hi, in_lo); + __ Selnez(TMP, in_lo, in_lo); + __ Or(out, out, TMP); + } else { + __ Movz(out, in_hi, in_lo); + __ Movn(out, in_lo, in_lo); + } + + in = out; + } else { + in = locations->InAt(0).AsRegister<Register>(); + // Give in_lo a dummy value to keep the compiler from complaining. + // Since we only get here in the 32-bit case, this value will never + // be used. + in_lo = in; + } + + // We don't have an instruction to count the number of trailing zeroes. + // Start by flipping the bits end-for-end so we can count the number of + // leading zeroes instead.
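+ // Expressed in portable terms (a sketch; BitReverse32 is hypothetical
+ // shorthand for the byte swap plus the nibble/pair/bit swaps emitted
+ // below), this computes
+ //   out = CountLeadingZeroes(BitReverse32(in));
+ // Because the MIPS CLZ instruction returns 32 for a zero input,
+ // numberOfTrailingZeros(0) == 32 falls out without a special case. On
+ // pre-R6 cores the masks 0x0F0F0F0F, 0x33333333 and 0x55555555 swap
+ // nibbles, bit pairs and adjacent bits in turn; R6 replaces those three
+ // stages with a single BITSWAP.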
+ if (isR2OrNewer) { + __ Rotr(out, in, 16); + __ Wsbh(out, out); + } else { + // MIPS32r1 + // __ Rotr(out, in, 16); + __ Sll(TMP, in, 16); + __ Srl(out, in, 16); + __ Or(out, out, TMP); + // __ Wsbh(out, out); + __ LoadConst32(AT, 0x00FF00FF); + __ And(TMP, out, AT); + __ Sll(TMP, TMP, 8); + __ Srl(out, out, 8); + __ And(out, out, AT); + __ Or(out, out, TMP); + } + + if (isR6) { + __ Bitswap(out, out); + __ ClzR6(out, out); + } else { + __ LoadConst32(AT, 0x0F0F0F0F); + __ And(TMP, out, AT); + __ Sll(TMP, TMP, 4); + __ Srl(out, out, 4); + __ And(out, out, AT); + __ Or(out, TMP, out); + __ LoadConst32(AT, 0x33333333); + __ And(TMP, out, AT); + __ Sll(TMP, TMP, 2); + __ Srl(out, out, 2); + __ And(out, out, AT); + __ Or(out, TMP, out); + __ LoadConst32(AT, 0x55555555); + __ And(TMP, out, AT); + __ Sll(TMP, TMP, 1); + __ Srl(out, out, 1); + __ And(out, out, AT); + __ Or(out, TMP, out); + __ ClzR2(out, out); + } + + if (is64bit) { + // If in_lo is zero, then we counted the number of trailing zeroes in in_hi so we must add the + // number of trailing zeroes in in_lo (32) to get the correct final count + __ LoadConst32(TMP, 32); + if (isR6) { + __ Seleqz(TMP, TMP, in_lo); + } else { + __ Movn(TMP, ZERO, in_lo); + } + __ Addu(out, out, TMP); + } +} + +// int java.lang.Integer.numberOfTrailingZeros(int i) +void IntrinsicLocationsBuilderMIPS::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) { + CreateIntToIntLocations(arena_, invoke, Location::kOutputOverlap); +} + +void IntrinsicCodeGeneratorMIPS::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) { + GenNumberOfTrailingZeroes(invoke->GetLocations(), false, IsR6(), IsR2OrNewer(), GetAssembler()); +} + +// int java.lang.Long.numberOfTrailingZeros(long i) +void IntrinsicLocationsBuilderMIPS::VisitLongNumberOfTrailingZeros(HInvoke* invoke) { + CreateIntToIntLocations(arena_, invoke, Location::kOutputOverlap); +} + +void IntrinsicCodeGeneratorMIPS::VisitLongNumberOfTrailingZeros(HInvoke* invoke) { + GenNumberOfTrailingZeroes(invoke->GetLocations(), true, IsR6(), IsR2OrNewer(), GetAssembler()); +} + +enum RotationDirection { + kRotateRight, + kRotateLeft, +}; + +static void GenRotate(HInvoke* invoke, + Primitive::Type type, + bool isR2OrNewer, + RotationDirection direction, + MipsAssembler* assembler) { + DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong); + + LocationSummary* locations = invoke->GetLocations(); + if (invoke->InputAt(1)->IsIntConstant()) { + int32_t shift = static_cast<int32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()); + if (type == Primitive::kPrimInt) { + Register in = locations->InAt(0).AsRegister<Register>(); + Register out = locations->Out().AsRegister<Register>(); + + shift &= 0x1f; + if (direction == kRotateLeft) { + shift = (32 - shift) & 0x1F; + } + + if (isR2OrNewer) { + if ((shift != 0) || (out != in)) { + __ Rotr(out, in, shift); + } + } else { + if (shift == 0) { + if (out != in) { + __ Move(out, in); + } + } else { + __ Srl(AT, in, shift); + __ Sll(out, in, 32 - shift); + __ Or(out, out, AT); + } + } + } else { // Primitive::kPrimLong + Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>(); + Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>(); + Register out_lo = locations->Out().AsRegisterPairLow<Register>(); + Register out_hi = locations->Out().AsRegisterPairHigh<Register>(); + + shift &= 0x3f; + if (direction == kRotateLeft) { + shift = (64 - shift) & 0x3F; + } + + if (shift == 0) { + __ Move(out_lo, in_lo); + __ Move(out_hi, in_hi); + } else if (shift == 32) { + 
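// Rotating a 64-bit register pair by exactly 32 reduces to swapping the low and high words, which the two moves below do; no shift or mask is needed. +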
__ Move(out_lo, in_hi); + __ Move(out_hi, in_lo); + } else if (shift < 32) { + __ Srl(AT, in_lo, shift); + __ Sll(out_lo, in_hi, 32 - shift); + __ Or(out_lo, out_lo, AT); + __ Srl(AT, in_hi, shift); + __ Sll(out_hi, in_lo, 32 - shift); + __ Or(out_hi, out_hi, AT); + } else { + __ Sll(AT, in_lo, 64 - shift); + __ Srl(out_lo, in_hi, shift - 32); + __ Or(out_lo, out_lo, AT); + __ Sll(AT, in_hi, 64 - shift); + __ Srl(out_hi, in_lo, shift - 32); + __ Or(out_hi, out_hi, AT); + } + } + } else { // !invoke->InputAt(1)->IsIntConstant() + Register shamt = locations->InAt(1).AsRegister<Register>(); + if (type == Primitive::kPrimInt) { + Register in = locations->InAt(0).AsRegister<Register>(); + Register out = locations->Out().AsRegister<Register>(); + + if (isR2OrNewer) { + if (direction == kRotateRight) { + __ Rotrv(out, in, shamt); + } else { + // negu tmp, shamt + __ Subu(TMP, ZERO, shamt); + __ Rotrv(out, in, TMP); + } + } else { + if (direction == kRotateRight) { + __ Srlv(AT, in, shamt); + __ Subu(TMP, ZERO, shamt); + __ Sllv(out, in, TMP); + __ Or(out, out, AT); + } else { + __ Sllv(AT, in, shamt); + __ Subu(TMP, ZERO, shamt); + __ Srlv(out, in, TMP); + __ Or(out, out, AT); + } + } + } else { // Primitive::kPrimLong + Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>(); + Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>(); + Register out_lo = locations->Out().AsRegisterPairLow<Register>(); + Register out_hi = locations->Out().AsRegisterPairHigh<Register>(); + + MipsLabel done; + + if (direction == kRotateRight) { + __ Nor(TMP, ZERO, shamt); + __ Srlv(AT, in_lo, shamt); + __ Sll(out_lo, in_hi, 1); + __ Sllv(out_lo, out_lo, TMP); + __ Or(out_lo, out_lo, AT); + __ Srlv(AT, in_hi, shamt); + __ Sll(out_hi, in_lo, 1); + __ Sllv(out_hi, out_hi, TMP); + __ Or(out_hi, out_hi, AT); + } else { + __ Nor(TMP, ZERO, shamt); + __ Sllv(AT, in_lo, shamt); + __ Srl(out_lo, in_hi, 1); + __ Srlv(out_lo, out_lo, TMP); + __ Or(out_lo, out_lo, AT); + __ Sllv(AT, in_hi, shamt); + __ Srl(out_hi, in_lo, 1); + __ Srlv(out_hi, out_hi, TMP); + __ Or(out_hi, out_hi, AT); + } + + __ Andi(TMP, shamt, 32); + __ Beqz(TMP, &done); + __ Move(TMP, out_hi); + __ Move(out_hi, out_lo); + __ Move(out_lo, TMP); + + __ Bind(&done); + } + } +} + +// int java.lang.Integer.rotateRight(int i, int distance) +void IntrinsicLocationsBuilderMIPS::VisitIntegerRotateRight(HInvoke* invoke) { + LocationSummary* locations = new (arena_) LocationSummary(invoke, + LocationSummary::kNoCall, + kIntrinsified); + locations->SetInAt(0, Location::RequiresRegister()); + locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1))); + locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); +} + +void IntrinsicCodeGeneratorMIPS::VisitIntegerRotateRight(HInvoke* invoke) { + GenRotate(invoke, Primitive::kPrimInt, IsR2OrNewer(), kRotateRight, GetAssembler()); +} + +// long java.lang.Long.rotateRight(long i, int distance) +void IntrinsicLocationsBuilderMIPS::VisitLongRotateRight(HInvoke* invoke) { + LocationSummary* locations = new (arena_) LocationSummary(invoke, + LocationSummary::kNoCall, + kIntrinsified); + locations->SetInAt(0, Location::RequiresRegister()); + locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1))); + locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap); +} + +void IntrinsicCodeGeneratorMIPS::VisitLongRotateRight(HInvoke* invoke) { + GenRotate(invoke, Primitive::kPrimLong, IsR2OrNewer(), kRotateRight, GetAssembler()); +} + +// int 
java.lang.Integer.rotateLeft(int i, int distance) +void IntrinsicLocationsBuilderMIPS::VisitIntegerRotateLeft(HInvoke* invoke) { + LocationSummary* locations = new (arena_) LocationSummary(invoke, + LocationSummary::kNoCall, + kIntrinsified); + locations->SetInAt(0, Location::RequiresRegister()); + locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1))); + locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); +} + +void IntrinsicCodeGeneratorMIPS::VisitIntegerRotateLeft(HInvoke* invoke) { + GenRotate(invoke, Primitive::kPrimInt, IsR2OrNewer(), kRotateLeft, GetAssembler()); +} + +// long java.lang.Long.rotateLeft(long i, int distance) +void IntrinsicLocationsBuilderMIPS::VisitLongRotateLeft(HInvoke* invoke) { + LocationSummary* locations = new (arena_) LocationSummary(invoke, + LocationSummary::kNoCall, + kIntrinsified); + locations->SetInAt(0, Location::RequiresRegister()); + locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1))); + locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap); +} + +void IntrinsicCodeGeneratorMIPS::VisitLongRotateLeft(HInvoke* invoke) { + GenRotate(invoke, Primitive::kPrimLong, IsR2OrNewer(), kRotateLeft, GetAssembler()); +} + +// int java.lang.Integer.reverse(int) +void IntrinsicLocationsBuilderMIPS::VisitIntegerReverse(HInvoke* invoke) { + CreateIntToIntLocations(arena_, invoke); +} + +void IntrinsicCodeGeneratorMIPS::VisitIntegerReverse(HInvoke* invoke) { + GenReverse(invoke->GetLocations(), + Primitive::kPrimInt, + IsR2OrNewer(), + IsR6(), + true, + GetAssembler()); +} + +// long java.lang.Long.reverse(long) +void IntrinsicLocationsBuilderMIPS::VisitLongReverse(HInvoke* invoke) { + CreateIntToIntLocations(arena_, invoke); +} + +void IntrinsicCodeGeneratorMIPS::VisitLongReverse(HInvoke* invoke) { + GenReverse(invoke->GetLocations(), + Primitive::kPrimLong, + IsR2OrNewer(), + IsR6(), + true, + GetAssembler()); } // boolean java.lang.String.equals(Object anObject) @@ -463,10 +931,6 @@ void IntrinsicLocationsBuilderMIPS::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUS void IntrinsicCodeGeneratorMIPS::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \ } -UNIMPLEMENTED_INTRINSIC(IntegerReverse) -UNIMPLEMENTED_INTRINSIC(LongReverse) -UNIMPLEMENTED_INTRINSIC(LongNumberOfLeadingZeros) -UNIMPLEMENTED_INTRINSIC(IntegerNumberOfLeadingZeros) UNIMPLEMENTED_INTRINSIC(MathAbsDouble) UNIMPLEMENTED_INTRINSIC(MathAbsFloat) UNIMPLEMENTED_INTRINSIC(MathAbsInt) @@ -519,12 +983,6 @@ UNIMPLEMENTED_INTRINSIC(StringIndexOfAfter) UNIMPLEMENTED_INTRINSIC(StringNewStringFromBytes) UNIMPLEMENTED_INTRINSIC(StringNewStringFromChars) UNIMPLEMENTED_INTRINSIC(StringNewStringFromString) -UNIMPLEMENTED_INTRINSIC(LongRotateLeft) -UNIMPLEMENTED_INTRINSIC(LongRotateRight) -UNIMPLEMENTED_INTRINSIC(LongNumberOfTrailingZeros) -UNIMPLEMENTED_INTRINSIC(IntegerRotateLeft) -UNIMPLEMENTED_INTRINSIC(IntegerRotateRight) -UNIMPLEMENTED_INTRINSIC(IntegerNumberOfTrailingZeros) UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent) UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck) diff --git a/compiler/optimizing/intrinsics_mips.h b/compiler/optimizing/intrinsics_mips.h index c71b3c68b7..19ad5255d5 100644 --- a/compiler/optimizing/intrinsics_mips.h +++ b/compiler/optimizing/intrinsics_mips.h @@ -67,6 +67,9 @@ INTRINSICS_LIST(OPTIMIZING_INTRINSICS) #undef INTRINSICS_LIST #undef OPTIMIZING_INTRINSICS + bool IsR2OrNewer(void); + bool IsR6(void); + private: MipsAssembler* GetAssembler(); diff --git a/compiler/optimizing/intrinsics_mips64.cc 
b/compiler/optimizing/intrinsics_mips64.cc index ecee11dea6..36e1b20e4e 100644 --- a/compiler/optimizing/intrinsics_mips64.cc +++ b/compiler/optimizing/intrinsics_mips64.cc @@ -162,7 +162,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* in } void IntrinsicCodeGeneratorMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) { - MoveFPToInt(invoke->GetLocations(), true, GetAssembler()); + MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); } // int java.lang.Float.floatToRawIntBits(float) @@ -171,7 +171,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invok } void IntrinsicCodeGeneratorMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) { - MoveFPToInt(invoke->GetLocations(), false, GetAssembler()); + MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); } static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) { @@ -199,7 +199,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invok } void IntrinsicCodeGeneratorMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) { - MoveIntToFP(invoke->GetLocations(), true, GetAssembler()); + MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); } // float java.lang.Float.intBitsToFloat(int) @@ -208,7 +208,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) } void IntrinsicCodeGeneratorMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) { - MoveIntToFP(invoke->GetLocations(), false, GetAssembler()); + MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); } static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) { @@ -290,7 +290,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* } void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) { - GenNumberOfLeadingZeroes(invoke->GetLocations(), false, GetAssembler()); + GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); } // int java.lang.Long.numberOfLeadingZeros(long i) @@ -299,7 +299,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* inv } void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) { - GenNumberOfLeadingZeroes(invoke->GetLocations(), true, GetAssembler()); + GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); } static void GenNumberOfTrailingZeroes(LocationSummary* locations, @@ -327,7 +327,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* } void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) { - GenNumberOfTrailingZeroes(invoke->GetLocations(), false, GetAssembler()); + GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); } // int java.lang.Long.numberOfTrailingZeros(long i) @@ -336,7 +336,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* in } void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) { - GenNumberOfTrailingZeroes(invoke->GetLocations(), true, GetAssembler()); + GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); } static void GenRotateRight(HInvoke* invoke, @@ -525,7 +525,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitMathAbsDouble(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitMathAbsDouble(HInvoke* invoke) { - MathAbsFP(invoke->GetLocations(), 
true, GetAssembler()); + MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); } // float java.lang.Math.abs(float) @@ -534,7 +534,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitMathAbsFloat(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitMathAbsFloat(HInvoke* invoke) { - MathAbsFP(invoke->GetLocations(), false, GetAssembler()); + MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); } static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) { @@ -566,7 +566,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitMathAbsInt(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitMathAbsInt(HInvoke* invoke) { - GenAbsInteger(invoke->GetLocations(), false, GetAssembler()); + GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); } // long java.lang.Math.abs(long) @@ -575,7 +575,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitMathAbsLong(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitMathAbsLong(HInvoke* invoke) { - GenAbsInteger(invoke->GetLocations(), true, GetAssembler()); + GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); } static void GenMinMaxFP(LocationSummary* locations, @@ -616,7 +616,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitMathMinDoubleDouble(HInvoke* invoke) } void IntrinsicCodeGeneratorMIPS64::VisitMathMinDoubleDouble(HInvoke* invoke) { - GenMinMaxFP(invoke->GetLocations(), true, true, GetAssembler()); + GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ true, GetAssembler()); } // float java.lang.Math.min(float, float) @@ -625,7 +625,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitMathMinFloatFloat(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitMathMinFloatFloat(HInvoke* invoke) { - GenMinMaxFP(invoke->GetLocations(), true, false, GetAssembler()); + GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ false, GetAssembler()); } // double java.lang.Math.max(double, double) @@ -634,7 +634,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitMathMaxDoubleDouble(HInvoke* invoke) } void IntrinsicCodeGeneratorMIPS64::VisitMathMaxDoubleDouble(HInvoke* invoke) { - GenMinMaxFP(invoke->GetLocations(), false, true, GetAssembler()); + GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, /* is_double */ true, GetAssembler()); } // float java.lang.Math.max(float, float) @@ -643,7 +643,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitMathMaxFloatFloat(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitMathMaxFloatFloat(HInvoke* invoke) { - GenMinMaxFP(invoke->GetLocations(), false, false, GetAssembler()); + GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, /* is_double */ false, GetAssembler()); } static void GenMinMax(LocationSummary* locations, @@ -713,7 +713,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitMathMinIntInt(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitMathMinIntInt(HInvoke* invoke) { - GenMinMax(invoke->GetLocations(), true, GetAssembler()); + GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler()); } // long java.lang.Math.min(long, long) @@ -722,7 +722,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitMathMinLongLong(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitMathMinLongLong(HInvoke* invoke) { - GenMinMax(invoke->GetLocations(), true, GetAssembler()); + GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler()); } // int java.lang.Math.max(int, int) @@ -731,7 +731,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitMathMaxIntInt(HInvoke* 
invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitMathMaxIntInt(HInvoke* invoke) { - GenMinMax(invoke->GetLocations(), false, GetAssembler()); + GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler()); } // long java.lang.Math.max(long, long) @@ -740,7 +740,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitMathMaxLongLong(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitMathMaxLongLong(HInvoke* invoke) { - GenMinMax(invoke->GetLocations(), false, GetAssembler()); + GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler()); } // double java.lang.Math.sqrt(double) @@ -1045,7 +1045,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGet(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGet(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_); } // int sun.misc.Unsafe.getIntVolatile(Object o, long offset) @@ -1054,7 +1054,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_); } // long sun.misc.Unsafe.getLong(Object o, long offset) @@ -1063,7 +1063,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLong(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLong(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_); } // long sun.misc.Unsafe.getLongVolatile(Object o, long offset) @@ -1072,7 +1072,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke } void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_); } // Object sun.misc.Unsafe.getObject(Object o, long offset) @@ -1081,7 +1081,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObject(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObject(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_); } // Object sun.misc.Unsafe.getObjectVolatile(Object o, long offset) @@ -1090,7 +1090,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invo } void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_); } static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) { @@ -1151,7 +1151,11 @@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafePut(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitUnsafePut(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimInt, + /* is_volatile */ false, + /* is_ordered */ false, + codegen_); } // void sun.misc.Unsafe.putOrderedInt(Object o, long offset, int x) @@ -1160,7 +1164,11 @@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) { - 
GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, true, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimInt, + /* is_volatile */ false, + /* is_ordered */ true, + codegen_); } // void sun.misc.Unsafe.putIntVolatile(Object o, long offset, int x) @@ -1169,7 +1177,11 @@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimInt, + /* is_volatile */ true, + /* is_ordered */ false, + codegen_); } // void sun.misc.Unsafe.putObject(Object o, long offset, Object x) @@ -1178,7 +1190,11 @@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObject(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObject(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimNot, + /* is_volatile */ false, + /* is_ordered */ false, + codegen_); } // void sun.misc.Unsafe.putOrderedObject(Object o, long offset, Object x) @@ -1187,7 +1203,11 @@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invok } void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, true, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimNot, + /* is_volatile */ false, + /* is_ordered */ true, + codegen_); } // void sun.misc.Unsafe.putObjectVolatile(Object o, long offset, Object x) @@ -1196,7 +1216,11 @@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invo } void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimNot, + /* is_volatile */ true, + /* is_ordered */ false, + codegen_); } // void sun.misc.Unsafe.putLong(Object o, long offset, long x) @@ -1205,7 +1229,11 @@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLong(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLong(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimLong, + /* is_volatile */ false, + /* is_ordered */ false, + codegen_); } // void sun.misc.Unsafe.putOrderedLong(Object o, long offset, long x) @@ -1214,7 +1242,11 @@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) } void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, true, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimLong, + /* is_volatile */ false, + /* is_ordered */ true, + codegen_); } // void sun.misc.Unsafe.putLongVolatile(Object o, long offset, long x) @@ -1223,7 +1255,11 @@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke } void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimLong, + /* is_volatile */ true, + /* is_ordered */ false, + codegen_); } static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, 
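// (For the Unsafe put visitors above: is_ordered corresponds to
// sun.misc.Unsafe.putOrdered*, which roughly needs a barrier before the
// store only, while is_volatile also needs a full barrier after the store;
// a volatile put is therefore strictly stronger than an ordered one. This
// matches the barrier pattern GenUnsafePut emits.)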
HInvoke* invoke) { @@ -1565,7 +1601,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOf(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOf(HInvoke* invoke) { - GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), true); + GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true); } // int java.lang.String.indexOf(int ch, int fromIndex) @@ -1584,7 +1620,8 @@ void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) { - GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), false); + GenerateStringIndexOf( + invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false); } // java.lang.String.String(byte[] bytes) diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc index 371588fc47..5b67cdefa3 100644 --- a/compiler/optimizing/intrinsics_x86.cc +++ b/compiler/optimizing/intrinsics_x86.cc @@ -138,31 +138,31 @@ static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86Assembler* } void IntrinsicLocationsBuilderX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) { - CreateFPToIntLocations(arena_, invoke, true); + CreateFPToIntLocations(arena_, invoke, /* is64bit */ true); } void IntrinsicLocationsBuilderX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) { - CreateIntToFPLocations(arena_, invoke, true); + CreateIntToFPLocations(arena_, invoke, /* is64bit */ true); } void IntrinsicCodeGeneratorX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) { - MoveFPToInt(invoke->GetLocations(), true, GetAssembler()); + MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); } void IntrinsicCodeGeneratorX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) { - MoveIntToFP(invoke->GetLocations(), true, GetAssembler()); + MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); } void IntrinsicLocationsBuilderX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) { - CreateFPToIntLocations(arena_, invoke, false); + CreateFPToIntLocations(arena_, invoke, /* is64bit */ false); } void IntrinsicLocationsBuilderX86::VisitFloatIntBitsToFloat(HInvoke* invoke) { - CreateIntToFPLocations(arena_, invoke, false); + CreateIntToFPLocations(arena_, invoke, /* is64bit */ false); } void IntrinsicCodeGeneratorX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) { - MoveFPToInt(invoke->GetLocations(), false, GetAssembler()); + MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); } void IntrinsicCodeGeneratorX86::VisitFloatIntBitsToFloat(HInvoke* invoke) { - MoveIntToFP(invoke->GetLocations(), false, GetAssembler()); + MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); } static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) { @@ -298,7 +298,7 @@ void IntrinsicLocationsBuilderX86::VisitMathAbsDouble(HInvoke* invoke) { } void IntrinsicCodeGeneratorX86::VisitMathAbsDouble(HInvoke* invoke) { - MathAbsFP(invoke->GetLocations(), true, GetAssembler()); + MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); } void IntrinsicLocationsBuilderX86::VisitMathAbsFloat(HInvoke* invoke) { @@ -306,7 +306,7 @@ void IntrinsicLocationsBuilderX86::VisitMathAbsFloat(HInvoke* invoke) { } void IntrinsicCodeGeneratorX86::VisitMathAbsFloat(HInvoke* invoke) { - MathAbsFP(invoke->GetLocations(), false, GetAssembler()); + MathAbsFP(invoke->GetLocations(), /* is64bit */ false, 
diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc
index 371588fc47..5b67cdefa3 100644
--- a/compiler/optimizing/intrinsics_x86.cc
+++ b/compiler/optimizing/intrinsics_x86.cc
@@ -138,31 +138,31 @@ static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86Assembler*
 
 void IntrinsicLocationsBuilderX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
-  CreateFPToIntLocations(arena_, invoke, true);
+  CreateFPToIntLocations(arena_, invoke, /* is64bit */ true);
 }
 void IntrinsicLocationsBuilderX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
-  CreateIntToFPLocations(arena_, invoke, true);
+  CreateIntToFPLocations(arena_, invoke, /* is64bit */ true);
 }
 
 void IntrinsicCodeGeneratorX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
-  MoveFPToInt(invoke->GetLocations(), true, GetAssembler());
+  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
 }
 void IntrinsicCodeGeneratorX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
-  MoveIntToFP(invoke->GetLocations(), true, GetAssembler());
+  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
 }
 
 void IntrinsicLocationsBuilderX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
-  CreateFPToIntLocations(arena_, invoke, false);
+  CreateFPToIntLocations(arena_, invoke, /* is64bit */ false);
 }
 void IntrinsicLocationsBuilderX86::VisitFloatIntBitsToFloat(HInvoke* invoke) {
-  CreateIntToFPLocations(arena_, invoke, false);
+  CreateIntToFPLocations(arena_, invoke, /* is64bit */ false);
 }
 
 void IntrinsicCodeGeneratorX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
-  MoveFPToInt(invoke->GetLocations(), false, GetAssembler());
+  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
 }
 void IntrinsicCodeGeneratorX86::VisitFloatIntBitsToFloat(HInvoke* invoke) {
-  MoveIntToFP(invoke->GetLocations(), false, GetAssembler());
+  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
 }
 
 static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
@@ -298,7 +298,7 @@ void IntrinsicLocationsBuilderX86::VisitMathAbsDouble(HInvoke* invoke) {
 }
 
 void IntrinsicCodeGeneratorX86::VisitMathAbsDouble(HInvoke* invoke) {
-  MathAbsFP(invoke->GetLocations(), true, GetAssembler());
+  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
 }
 
 void IntrinsicLocationsBuilderX86::VisitMathAbsFloat(HInvoke* invoke) {
@@ -306,7 +306,7 @@ void IntrinsicLocationsBuilderX86::VisitMathAbsFloat(HInvoke* invoke) {
 }
 
 void IntrinsicCodeGeneratorX86::VisitMathAbsFloat(HInvoke* invoke) {
-  MathAbsFP(invoke->GetLocations(), false, GetAssembler());
+  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
 }
 
 static void CreateAbsIntLocation(ArenaAllocator* arena, HInvoke* invoke) {
@@ -490,7 +490,7 @@ void IntrinsicLocationsBuilderX86::VisitMathMinDoubleDouble(HInvoke* invoke) {
 }
 
 void IntrinsicCodeGeneratorX86::VisitMathMinDoubleDouble(HInvoke* invoke) {
-  GenMinMaxFP(invoke->GetLocations(), true, true, GetAssembler());
+  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ true, GetAssembler());
 }
 
 void IntrinsicLocationsBuilderX86::VisitMathMinFloatFloat(HInvoke* invoke) {
@@ -498,7 +498,7 @@ void IntrinsicLocationsBuilderX86::VisitMathMinFloatFloat(HInvoke* invoke) {
 }
 
 void IntrinsicCodeGeneratorX86::VisitMathMinFloatFloat(HInvoke* invoke) {
-  GenMinMaxFP(invoke->GetLocations(), true, false, GetAssembler());
+  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ false, GetAssembler());
 }
 
 void IntrinsicLocationsBuilderX86::VisitMathMaxDoubleDouble(HInvoke* invoke) {
@@ -506,7 +506,7 @@ void IntrinsicLocationsBuilderX86::VisitMathMaxDoubleDouble(HInvoke* invoke) {
 }
 
 void IntrinsicCodeGeneratorX86::VisitMathMaxDoubleDouble(HInvoke* invoke) {
-  GenMinMaxFP(invoke->GetLocations(), false, true, GetAssembler());
+  GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, /* is_double */ true, GetAssembler());
 }
 
 void IntrinsicLocationsBuilderX86::VisitMathMaxFloatFloat(HInvoke* invoke) {
@@ -514,7 +514,7 @@ void IntrinsicLocationsBuilderX86::VisitMathMaxFloatFloat(HInvoke* invoke) {
 }
 
 void IntrinsicCodeGeneratorX86::VisitMathMaxFloatFloat(HInvoke* invoke) {
-  GenMinMaxFP(invoke->GetLocations(), false, false, GetAssembler());
+  GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, /* is_double */ false, GetAssembler());
 }
 
 static void GenMinMax(LocationSummary* locations, bool is_min, bool is_long,
@@ -597,7 +597,7 @@ void IntrinsicLocationsBuilderX86::VisitMathMinIntInt(HInvoke* invoke) {
 }
 
 void IntrinsicCodeGeneratorX86::VisitMathMinIntInt(HInvoke* invoke) {
-  GenMinMax(invoke->GetLocations(), true, false, GetAssembler());
+  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ false, GetAssembler());
 }
 
 void IntrinsicLocationsBuilderX86::VisitMathMinLongLong(HInvoke* invoke) {
@@ -605,7 +605,7 @@ void IntrinsicLocationsBuilderX86::VisitMathMinLongLong(HInvoke* invoke) {
 }
 
 void IntrinsicCodeGeneratorX86::VisitMathMinLongLong(HInvoke* invoke) {
-  GenMinMax(invoke->GetLocations(), true, true, GetAssembler());
+  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ true, GetAssembler());
 }
 
 void IntrinsicLocationsBuilderX86::VisitMathMaxIntInt(HInvoke* invoke) {
@@ -613,7 +613,7 @@ void IntrinsicLocationsBuilderX86::VisitMathMaxIntInt(HInvoke* invoke) {
 }
 
 void IntrinsicCodeGeneratorX86::VisitMathMaxIntInt(HInvoke* invoke) {
-  GenMinMax(invoke->GetLocations(), false, false, GetAssembler());
+  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ false, GetAssembler());
 }
 
 void IntrinsicLocationsBuilderX86::VisitMathMaxLongLong(HInvoke* invoke) {
@@ -621,7 +621,7 @@ void IntrinsicLocationsBuilderX86::VisitMathMaxLongLong(HInvoke* invoke) {
 }
 
 void IntrinsicCodeGeneratorX86::VisitMathMaxLongLong(HInvoke* invoke) {
-  GenMinMax(invoke->GetLocations(), false, true, GetAssembler());
+  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ true, GetAssembler());
 }
 
 static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
@@ -1265,19 +1265,20 @@ static void GenerateStringIndexOf(HInvoke* invoke,
 }
 
 void IntrinsicLocationsBuilderX86::VisitStringIndexOf(HInvoke* invoke) {
-  CreateStringIndexOfLocations(invoke, arena_, true);
+  CreateStringIndexOfLocations(invoke, arena_, /* start_at_zero */ true);
 }
 
 void IntrinsicCodeGeneratorX86::VisitStringIndexOf(HInvoke* invoke) {
-  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), true);
+  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
 }
 
 void IntrinsicLocationsBuilderX86::VisitStringIndexOfAfter(HInvoke* invoke) {
-  CreateStringIndexOfLocations(invoke, arena_, false);
+  CreateStringIndexOfLocations(invoke, arena_, /* start_at_zero */ false);
 }
 
 void IntrinsicCodeGeneratorX86::VisitStringIndexOfAfter(HInvoke* invoke) {
-  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), false);
+  GenerateStringIndexOf(
+      invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
 }
 
 void IntrinsicLocationsBuilderX86::VisitStringNewStringFromBytes(HInvoke* invoke) {
@@ -1660,42 +1661,42 @@ static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke
 
 void IntrinsicLocationsBuilderX86::VisitUnsafeGet(HInvoke* invoke) {
-  CreateIntIntIntToIntLocations(arena_, invoke, false, false);
+  CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ false, /* is_volatile */ false);
 }
 void IntrinsicLocationsBuilderX86::VisitUnsafeGetVolatile(HInvoke* invoke) {
-  CreateIntIntIntToIntLocations(arena_, invoke, false, true);
+  CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ false, /* is_volatile */ true);
}
 void IntrinsicLocationsBuilderX86::VisitUnsafeGetLong(HInvoke* invoke) {
-  CreateIntIntIntToIntLocations(arena_, invoke, false, false);
+  CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ true, /* is_volatile */ false);
 }
 void IntrinsicLocationsBuilderX86::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
-  CreateIntIntIntToIntLocations(arena_, invoke, true, true);
+  CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ true, /* is_volatile */ true);
 }
 void IntrinsicLocationsBuilderX86::VisitUnsafeGetObject(HInvoke* invoke) {
-  CreateIntIntIntToIntLocations(arena_, invoke, false, false);
+  CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ false, /* is_volatile */ false);
 }
 void IntrinsicLocationsBuilderX86::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
-  CreateIntIntIntToIntLocations(arena_, invoke, false, true);
+  CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ false, /* is_volatile */ true);
 }
 
 void IntrinsicCodeGeneratorX86::VisitUnsafeGet(HInvoke* invoke) {
-  GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_);
+  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
 }
 void IntrinsicCodeGeneratorX86::VisitUnsafeGetVolatile(HInvoke* invoke) {
-  GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_);
+  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
 }
 void IntrinsicCodeGeneratorX86::VisitUnsafeGetLong(HInvoke* invoke) {
-  GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_);
+  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
 }
 void IntrinsicCodeGeneratorX86::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
-  GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_);
+  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
 }
 void IntrinsicCodeGeneratorX86::VisitUnsafeGetObject(HInvoke* invoke) {
-  GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_);
+  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
 }
 void IntrinsicCodeGeneratorX86::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
-  GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_);
+  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
 }
@@ -1722,31 +1723,40 @@ static void CreateIntIntIntIntToVoidPlusTempsLocations(ArenaAllocator* arena,
 
 void IntrinsicLocationsBuilderX86::VisitUnsafePut(HInvoke* invoke) {
-  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimInt, invoke, false);
+  CreateIntIntIntIntToVoidPlusTempsLocations(
+      arena_, Primitive::kPrimInt, invoke, /* is_volatile */ false);
 }
 void IntrinsicLocationsBuilderX86::VisitUnsafePutOrdered(HInvoke* invoke) {
-  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimInt, invoke, false);
+  CreateIntIntIntIntToVoidPlusTempsLocations(
+      arena_, Primitive::kPrimInt, invoke, /* is_volatile */ false);
 }
 void IntrinsicLocationsBuilderX86::VisitUnsafePutVolatile(HInvoke* invoke) {
-  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimInt, invoke, true);
+  CreateIntIntIntIntToVoidPlusTempsLocations(
+      arena_, Primitive::kPrimInt, invoke, /* is_volatile */ true);
 }
 void IntrinsicLocationsBuilderX86::VisitUnsafePutObject(HInvoke* invoke) {
-  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimNot, invoke, false);
+  CreateIntIntIntIntToVoidPlusTempsLocations(
+      arena_, Primitive::kPrimNot, invoke, /* is_volatile */ false);
 }
 void IntrinsicLocationsBuilderX86::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
-  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimNot, invoke, false);
+  CreateIntIntIntIntToVoidPlusTempsLocations(
+      arena_, Primitive::kPrimNot, invoke, /* is_volatile */ false);
 }
 void IntrinsicLocationsBuilderX86::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
-  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimNot, invoke, true);
+  CreateIntIntIntIntToVoidPlusTempsLocations(
+      arena_, Primitive::kPrimNot, invoke, /* is_volatile */ true);
 }
 void IntrinsicLocationsBuilderX86::VisitUnsafePutLong(HInvoke* invoke) {
-  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimLong, invoke, false);
+  CreateIntIntIntIntToVoidPlusTempsLocations(
+      arena_, Primitive::kPrimLong, invoke, /* is_volatile */ false);
 }
 void IntrinsicLocationsBuilderX86::VisitUnsafePutLongOrdered(HInvoke* invoke) {
-  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimLong, invoke, false);
+  CreateIntIntIntIntToVoidPlusTempsLocations(
+      arena_, Primitive::kPrimLong, invoke, /* is_volatile */ false);
 }
 void IntrinsicLocationsBuilderX86::VisitUnsafePutLongVolatile(HInvoke* invoke) {
-  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimLong, invoke, true);
+  CreateIntIntIntIntToVoidPlusTempsLocations(
+      arena_, Primitive::kPrimLong, invoke, /* is_volatile */ true);
 }
 
 // We don't care for ordered: it requires an AnyStore barrier, which is already given by the x86
@@ -1798,31 +1808,31 @@ static void GenUnsafePut(LocationSummary* locations,
 
 void IntrinsicCodeGeneratorX86::VisitUnsafePut(HInvoke* invoke) {
-  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, codegen_);
+  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ false, codegen_);
 }
 void IntrinsicCodeGeneratorX86::VisitUnsafePutOrdered(HInvoke* invoke) {
-  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, codegen_);
+  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ false, codegen_);
 }
 void IntrinsicCodeGeneratorX86::VisitUnsafePutVolatile(HInvoke* invoke) {
-  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, codegen_);
+  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ true, codegen_);
 }
 void IntrinsicCodeGeneratorX86::VisitUnsafePutObject(HInvoke* invoke) {
-  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, codegen_);
+  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ false, codegen_);
 }
 void IntrinsicCodeGeneratorX86::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
-  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, codegen_);
+  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ false, codegen_);
 }
 void IntrinsicCodeGeneratorX86::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
-  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, codegen_);
+  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ true, codegen_);
 }
 void IntrinsicCodeGeneratorX86::VisitUnsafePutLong(HInvoke* invoke) {
-  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, codegen_);
+  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ false, codegen_);
 }
 void IntrinsicCodeGeneratorX86::VisitUnsafePutLongOrdered(HInvoke* invoke) {
-  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, codegen_);
+  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ false, codegen_);
 }
 void IntrinsicCodeGeneratorX86::VisitUnsafePutLongVolatile(HInvoke* invoke) {
-  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, codegen_);
+  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ true, codegen_);
 }
 
 static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, Primitive::Type type,
diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc
index 2d9f01b821..ecd129f31e 100644
--- a/compiler/optimizing/intrinsics_x86_64.cc
+++ b/compiler/optimizing/intrinsics_x86_64.cc
@@ -115,10 +115,10 @@ void IntrinsicLocationsBuilderX86_64::VisitDoubleLongBitsToDouble(HInvok
 }
 
 void IntrinsicCodeGeneratorX86_64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
-  MoveFPToInt(invoke->GetLocations(), true, GetAssembler());
+  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
 }
 void IntrinsicCodeGeneratorX86_64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
-  MoveIntToFP(invoke->GetLocations(), true, GetAssembler());
+  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
 }
 
 void IntrinsicLocationsBuilderX86_64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
@@ -129,10 +129,10 @@ void IntrinsicLocationsBuilderX86_64::VisitFloatIntBitsToFloat(HInvoke* invoke)
 }
 
 void IntrinsicCodeGeneratorX86_64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
-  MoveFPToInt(invoke->GetLocations(), false, GetAssembler());
+  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
 }
 void IntrinsicCodeGeneratorX86_64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
-  MoveIntToFP(invoke->GetLocations(), false, GetAssembler());
+  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
 }
 
 static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
@@ -230,7 +230,7 @@ void IntrinsicLocationsBuilderX86_64::VisitMathAbsDouble(HInvoke* invoke) {
 }
 
 void IntrinsicCodeGeneratorX86_64::VisitMathAbsDouble(HInvoke* invoke) {
-  MathAbsFP(invoke->GetLocations(), true, GetAssembler(), codegen_);
+  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler(), codegen_);
 }
 
 void IntrinsicLocationsBuilderX86_64::VisitMathAbsFloat(HInvoke* invoke) {
@@ -238,7 +238,7 @@ void IntrinsicLocationsBuilderX86_64::VisitMathAbsFloat(HInvoke* invoke) {
 }
 
 void IntrinsicCodeGeneratorX86_64::VisitMathAbsFloat(HInvoke* invoke) {
-  MathAbsFP(invoke->GetLocations(), false, GetAssembler(), codegen_);
+  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler(), codegen_);
 }
 
 static void CreateIntToIntPlusTemp(ArenaAllocator* arena, HInvoke* invoke) {
@@ -277,7 +277,7 @@ void IntrinsicLocationsBuilderX86_64::VisitMathAbsInt(HInvoke* invoke) {
 }
 
 void IntrinsicCodeGeneratorX86_64::VisitMathAbsInt(HInvoke* invoke) {
-  GenAbsInteger(invoke->GetLocations(), false, GetAssembler());
+  GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
 }
 
 void IntrinsicLocationsBuilderX86_64::VisitMathAbsLong(HInvoke* invoke) {
@@ -285,7 +285,7 @@ void IntrinsicLocationsBuilderX86_64::VisitMathAbsLong(HInvoke* invoke) {
 }
 
 void IntrinsicCodeGeneratorX86_64::VisitMathAbsLong(HInvoke* invoke) {
-  GenAbsInteger(invoke->GetLocations(), true, GetAssembler());
+  GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
 }
 
 static void GenMinMaxFP(LocationSummary* locations,
@@ -388,7 +388,8 @@ void IntrinsicLocationsBuilderX86_64::VisitMathMinDoubleDouble(HInvoke* invoke)
 }
 
 void IntrinsicCodeGeneratorX86_64::VisitMathMinDoubleDouble(HInvoke* invoke) {
-  GenMinMaxFP(invoke->GetLocations(), true, true, GetAssembler(), codegen_);
+  GenMinMaxFP(
+      invoke->GetLocations(), /* is_min */ true, /* is_double */ true, GetAssembler(), codegen_);
 }
 
 void IntrinsicLocationsBuilderX86_64::VisitMathMinFloatFloat(HInvoke* invoke) {
@@ -396,7 +397,8 @@ void IntrinsicLocationsBuilderX86_64::VisitMathMinFloatFloat(HInvoke* invoke) {
 }
 
 void IntrinsicCodeGeneratorX86_64::VisitMathMinFloatFloat(HInvoke* invoke) {
-  GenMinMaxFP(invoke->GetLocations(), true, false, GetAssembler(), codegen_);
+  GenMinMaxFP(
+      invoke->GetLocations(), /* is_min */ true, /* is_double */ false, GetAssembler(), codegen_);
 }
 
 void IntrinsicLocationsBuilderX86_64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
@@ -404,7 +406,8 @@ void IntrinsicLocationsBuilderX86_64::VisitMathMaxDoubleDouble(HInvoke* invoke)
 }
 
 void IntrinsicCodeGeneratorX86_64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
-  GenMinMaxFP(invoke->GetLocations(), false, true, GetAssembler(), codegen_);
+  GenMinMaxFP(
+      invoke->GetLocations(), /* is_min */ false, /* is_double */ true, GetAssembler(), codegen_);
 }
 
 void IntrinsicLocationsBuilderX86_64::VisitMathMaxFloatFloat(HInvoke* invoke) {
@@ -412,7 +415,8 @@ void IntrinsicLocationsBuilderX86_64::VisitMathMaxFloatFloat(HInvoke* invoke) {
 }
 
 void IntrinsicCodeGeneratorX86_64::VisitMathMaxFloatFloat(HInvoke* invoke) {
-  GenMinMaxFP(invoke->GetLocations(), false, false, GetAssembler(), codegen_);
+  GenMinMaxFP(
+      invoke->GetLocations(), /* is_min */ false, /* is_double */ false, GetAssembler(), codegen_);
 }
 
 static void GenMinMax(LocationSummary* locations, bool is_min, bool is_long,
@@ -461,7 +465,7 @@ void IntrinsicLocationsBuilderX86_64::VisitMathMinIntInt(HInvoke* invoke) {
 }
 
 void IntrinsicCodeGeneratorX86_64::VisitMathMinIntInt(HInvoke* invoke) {
-  GenMinMax(invoke->GetLocations(), true, false, GetAssembler());
+  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ false, GetAssembler());
 }
 
 void IntrinsicLocationsBuilderX86_64::VisitMathMinLongLong(HInvoke* invoke) {
@@ -469,7 +473,7 @@ void IntrinsicLocationsBuilderX86_64::VisitMathMinLongLong(HInvoke* invoke) {
 }
 
 void IntrinsicCodeGeneratorX86_64::VisitMathMinLongLong(HInvoke* invoke) {
-  GenMinMax(invoke->GetLocations(), true, true, GetAssembler());
+  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ true, GetAssembler());
 }
 
 void IntrinsicLocationsBuilderX86_64::VisitMathMaxIntInt(HInvoke* invoke) {
@@ -477,7 +481,7 @@ void IntrinsicLocationsBuilderX86_64::VisitMathMaxIntInt(HInvoke* invoke) {
 }
 
 void IntrinsicCodeGeneratorX86_64::VisitMathMaxIntInt(HInvoke* invoke) {
-  GenMinMax(invoke->GetLocations(), false, false, GetAssembler());
+  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ false, GetAssembler());
 }
 
 void IntrinsicLocationsBuilderX86_64::VisitMathMaxLongLong(HInvoke* invoke) {
@@ -485,7 +489,7 @@ void IntrinsicLocationsBuilderX86_64::VisitMathMaxLongLong(HInvoke* invoke) {
 }
 
 void IntrinsicCodeGeneratorX86_64::VisitMathMaxLongLong(HInvoke* invoke) {
-  GenMinMax(invoke->GetLocations(), false, true, GetAssembler());
+  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ true, GetAssembler());
 }
 
 static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
@@ -690,7 +694,7 @@ void IntrinsicCodeGeneratorX86_64::VisitMathRoundDouble(HInvoke* invoke) {
   __ j(kUnordered, &nan);
 
   // output = double-to-long-truncate(input)
-  __ cvttsd2si(out, inPlusPointFive, true);
+  __ cvttsd2si(out, inPlusPointFive, /* is64bit */ true);
   __ jmp(&done);
 
   __ Bind(&nan);
@@ -1152,7 +1156,7 @@ void IntrinsicCodeGeneratorX86_64::VisitSystemArrayCopy(HInvoke* invoke) {
                         temp2,
                         dest,
                         CpuRegister(kNoRegister),
-                        false);
+                        /* value_can_be_null */ false);
 
   __ Bind(slow_path->GetExitLabel());
 }
@@ -1180,8 +1184,8 @@ void IntrinsicCodeGeneratorX86_64::VisitStringCompareTo(HInvoke* invoke) {
   codegen_->AddSlowPath(slow_path);
   __ j(kEqual, slow_path->GetEntryLabel());
 
-  __ gs()->call(Address::Absolute(
-        QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pStringCompareTo), true));
+  __ gs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pStringCompareTo),
+                                  /* no_rip */ true));
 
   __ Bind(slow_path->GetExitLabel());
 }
@@ -1372,7 +1376,7 @@ static void GenerateStringIndexOf(HInvoke* invoke,
   // Ensure we have a start index >= 0;
   __ xorl(counter, counter);
   __ cmpl(start_index, Immediate(0));
-  __ cmov(kGreater, counter, start_index, false);  // 32-bit copy is enough.
+  __ cmov(kGreater, counter, start_index, /* is64bit */ false);  // 32-bit copy is enough.
 
   // Move to the start of the string: string_obj + value_offset + 2 * start_index.
   __ leaq(string_obj, Address(string_obj, counter, ScaleFactor::TIMES_2, value_offset));
@@ -1409,19 +1413,20 @@ static void GenerateStringIndexOf(HInvoke* invoke,
 }
 
 void IntrinsicLocationsBuilderX86_64::VisitStringIndexOf(HInvoke* invoke) {
-  CreateStringIndexOfLocations(invoke, arena_, true);
+  CreateStringIndexOfLocations(invoke, arena_, /* start_at_zero */ true);
 }
 
 void IntrinsicCodeGeneratorX86_64::VisitStringIndexOf(HInvoke* invoke) {
-  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), true);
+  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
 }
 
 void IntrinsicLocationsBuilderX86_64::VisitStringIndexOfAfter(HInvoke* invoke) {
-  CreateStringIndexOfLocations(invoke, arena_, false);
+  CreateStringIndexOfLocations(invoke, arena_, /* start_at_zero */ false);
 }
 
 void IntrinsicCodeGeneratorX86_64::VisitStringIndexOfAfter(HInvoke* invoke) {
-  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), false);
+  GenerateStringIndexOf(
+      invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
 }
 
 void IntrinsicLocationsBuilderX86_64::VisitStringNewStringFromBytes(HInvoke* invoke) {
@@ -1446,8 +1451,8 @@ void IntrinsicCodeGeneratorX86_64::VisitStringNewStringFromBytes(HInvoke* invoke
   codegen_->AddSlowPath(slow_path);
   __ j(kEqual, slow_path->GetEntryLabel());
 
-  __ gs()->call(Address::Absolute(
-        QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pAllocStringFromBytes), true));
+  __ gs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pAllocStringFromBytes),
+                                  /* no_rip */ true));
   codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
   __ Bind(slow_path->GetExitLabel());
 }
@@ -1466,8 +1471,8 @@ void IntrinsicLocationsBuilderX86_64::VisitStringNewStringFromChars(HInv
 void IntrinsicCodeGeneratorX86_64::VisitStringNewStringFromChars(HInvoke* invoke) {
   X86_64Assembler* assembler = GetAssembler();
 
-  __ gs()->call(Address::Absolute(
-        QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pAllocStringFromChars), true));
+  __ gs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pAllocStringFromChars),
+                                  /* no_rip */ true));
   codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
 }
@@ -1490,8 +1495,8 @@ void IntrinsicCodeGeneratorX86_64::VisitStringNewStringFromString(HInvok
   codegen_->AddSlowPath(slow_path);
   __ j(kEqual, slow_path->GetEntryLabel());
 
-  __ gs()->call(Address::Absolute(
-        QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pAllocStringFromString), true));
+  __ gs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pAllocStringFromString),
+                                  /* no_rip */ true));
   codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
   __ Bind(slow_path->GetExitLabel());
 }
@@ -1715,7 +1720,8 @@ void IntrinsicLocationsBuilderX86_64::VisitThreadCurrentThread(HInvoke* invoke)
 
 void IntrinsicCodeGeneratorX86_64::VisitThreadCurrentThread(HInvoke* invoke) {
   CpuRegister out = invoke->GetLocations()->Out().AsRegister<CpuRegister>();
-  GetAssembler()->gs()->movl(out, Address::Absolute(Thread::PeerOffset<kX86_64WordSize>(), true));
+  GetAssembler()->gs()->movl(out, Address::Absolute(Thread::PeerOffset<kX86_64WordSize>(),
+                                                    /* no_rip */ true));
 }
 
 static void GenUnsafeGet(HInvoke* invoke,
@@ -1786,22 +1792,22 @@ void IntrinsicLocationsBuilderX86_64::VisitUnsafeGetObjectVolatile(HInvo
 
 void IntrinsicCodeGeneratorX86_64::VisitUnsafeGet(HInvoke* invoke) {
-  GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_);
+  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
 }
 void IntrinsicCodeGeneratorX86_64::VisitUnsafeGetVolatile(HInvoke* invoke) {
-  GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_);
+  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
 }
 void IntrinsicCodeGeneratorX86_64::VisitUnsafeGetLong(HInvoke* invoke) {
-  GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_);
+  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}
 void IntrinsicCodeGeneratorX86_64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
-  GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_);
+  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
 }
 void IntrinsicCodeGeneratorX86_64::VisitUnsafeGetObject(HInvoke* invoke) {
-  GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_);
+  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
 }
 void IntrinsicCodeGeneratorX86_64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
-  GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_);
+  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
 }
@@ -1885,31 +1891,31 @@ static void GenUnsafePut(LocationSummary* locations, Primitive::Type type, bool
 
 void IntrinsicCodeGeneratorX86_64::VisitUnsafePut(HInvoke* invoke) {
-  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, codegen_);
+  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ false, codegen_);
 }
 void IntrinsicCodeGeneratorX86_64::VisitUnsafePutOrdered(HInvoke* invoke) {
-  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, codegen_);
+  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ false, codegen_);
 }
 void IntrinsicCodeGeneratorX86_64::VisitUnsafePutVolatile(HInvoke* invoke) {
-  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, codegen_);
+  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ true, codegen_);
 }
 void IntrinsicCodeGeneratorX86_64::VisitUnsafePutObject(HInvoke* invoke) {
-  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, codegen_);
+  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ false, codegen_);
 }
 void IntrinsicCodeGeneratorX86_64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
-  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, codegen_);
+  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ false, codegen_);
 }
 void IntrinsicCodeGeneratorX86_64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
-  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, codegen_);
+  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ true, codegen_);
 }
 void IntrinsicCodeGeneratorX86_64::VisitUnsafePutLong(HInvoke* invoke) {
-  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, codegen_);
+  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ false, codegen_);
 }
 void IntrinsicCodeGeneratorX86_64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
-  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, codegen_);
+  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ false, codegen_);
 }
 void IntrinsicCodeGeneratorX86_64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
-  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, codegen_);
+  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ true, codegen_);
 }
 
 static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena,
                                        Primitive::Type type,
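The comment-annotation style used across these three intrinsics files keeps the boolean parameters themselves intact. A stricter alternative, not used here, is to replace each bool with a scoped enum so the compiler rejects transposed flags outright; a sketch with invented names:

  enum class Volatility { kNonVolatile, kVolatile };
  enum class Ordering { kUnordered, kOrdered };

  // Hypothetical variant of a put helper with self-describing flag types.
  void GenPut(Volatility volatility, Ordering ordering);

  void Caller() {
    GenPut(Volatility::kVolatile, Ordering::kUnordered);  // Cannot be swapped silently.
  }

The trade-off is churn at every declaration and call site, which is presumably why the lighter comment convention was chosen for an already large code base.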
diff --git a/compiler/optimizing/licm.cc b/compiler/optimizing/licm.cc
index c38bbe3477..02befc011a 100644
--- a/compiler/optimizing/licm.cc
+++ b/compiler/optimizing/licm.cc
@@ -121,6 +121,8 @@ void LICM::Run() {
         // phi in it.
         if (instruction->NeedsEnvironment()) {
           UpdateLoopPhisIn(instruction->GetEnvironment(), loop_info);
+        } else {
+          DCHECK(!instruction->HasEnvironment());
         }
         instruction->MoveBefore(pre_header->GetLastInstruction());
       } else if (instruction->CanThrow()) {
diff --git a/compiler/optimizing/load_store_elimination.cc b/compiler/optimizing/load_store_elimination.cc
index 5b89cfef5a..680f89f9b9 100644
--- a/compiler/optimizing/load_store_elimination.cc
+++ b/compiler/optimizing/load_store_elimination.cc
@@ -933,8 +933,9 @@ class LSEVisitor : public HGraphVisitor {
 };
 
 void LoadStoreElimination::Run() {
-  if (graph_->IsDebuggable()) {
+  if (graph_->IsDebuggable() || graph_->HasTryCatch()) {
     // Debugger may set heap values or trigger deoptimization of callers.
+    // Try/catch support not implemented yet.
     // Skip this optimization.
     return;
   }
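Both hunks tighten a pass precondition: LICM now asserts that an instruction claiming NeedsEnvironment() == false really carries no environment, and load-store elimination bails out on graphs containing try/catch until support exists. The guard shape in isolation, with a stub standing in for the real HGraph:

  // Stub exposing just the two predicates the guard needs; the real HGraph has far more.
  struct HGraph {
    bool debuggable = false;
    bool has_try_catch = false;
    bool IsDebuggable() const { return debuggable; }
    bool HasTryCatch() const { return has_try_catch; }
  };

  // Sketch of the early-out pattern in LoadStoreElimination::Run().
  void RunPass(HGraph* graph) {
    if (graph->IsDebuggable() || graph->HasTryCatch()) {
      return;  // Unsupported graph shape: skip rather than miscompile.
    }
    // ... the actual elimination would run here ...
  }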
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 19614f11c6..9d3c88c79e 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -371,6 +371,9 @@ class HGraph : public ArenaObject<kArenaAllocGraph> {
   bool HasTryCatch() const { return has_try_catch_; }
   void SetHasTryCatch(bool value) { has_try_catch_ = value; }
 
+  ArtMethod* GetArtMethod() const { return art_method_; }
+  void SetArtMethod(ArtMethod* method) { art_method_ = method; }
+
   // Returns an instruction with the opposite boolean value from 'cond'.
   // The instruction has been inserted into the graph, either as a constant, or
   // before cursor.
@@ -479,6 +482,11 @@ class HGraph : public ArenaObject<kArenaAllocGraph> {
 
   HCurrentMethod* cached_current_method_;
 
+  // The ArtMethod this graph is for. Note that for AOT, it may be null,
+  // for example for methods whose declaring class could not be resolved
+  // (such as when the superclass could not be found).
+  ArtMethod* art_method_;
+
   friend class SsaBuilder;           // For caching constants.
   friend class SsaLivenessAnalysis;  // For the linear order.
   ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
@@ -2462,11 +2470,15 @@ class HTryBoundary : public HTemplateInstruction<0> {
 
 // Deoptimize to interpreter, upon checking a condition.
 class HDeoptimize : public HTemplateInstruction<1> {
  public:
-  explicit HDeoptimize(HInstruction* cond, uint32_t dex_pc)
+  HDeoptimize(HInstruction* cond, uint32_t dex_pc)
       : HTemplateInstruction(SideEffects::None(), dex_pc) {
     SetRawInputAt(0, cond);
   }
 
+  bool CanBeMoved() const OVERRIDE { return true; }
+  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
+    return true;
+  }
   bool NeedsEnvironment() const OVERRIDE { return true; }
 
   bool CanThrow() const OVERRIDE { return true; }
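Overriding CanBeMoved() and InstructionDataEquals() is what lets redundancy elimination merge two deopt guards over the same condition: GVN treats instructions as congruent when the opcode, the inputs, and the instruction-specific data all match. A toy version of that congruence test (the two hook names are the real ones; everything else is deliberately simplified):

  enum class Opcode { kDeoptimize, kAdd };

  struct Insn {
    Opcode opcode;
    bool CanBeMoved() const { return true; }
    bool InstructionDataEquals(const Insn&) const { return true; }
  };

  // Two instructions may be commoned if they agree on opcode and data and the
  // earlier one is allowed to stand in for the later one. Real GVN also
  // requires identical inputs and compatible side effects.
  bool CanCommon(const Insn& a, const Insn& b) {
    return a.opcode == b.opcode && a.CanBeMoved() && a.InstructionDataEquals(b);
  }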
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index cae2d3f01b..4643aed9cc 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -427,7 +427,7 @@ static void MaybeRunInliner(HGraph* graph,
     return;
   }
   HInliner* inliner = new (graph->GetArena()) HInliner(
-      graph, codegen, dex_compilation_unit, dex_compilation_unit, driver, handles, stats);
+      graph, graph, codegen, dex_compilation_unit, dex_compilation_unit, driver, handles, stats);
   HOptimization* optimizations[] = { inliner };
 
   RunOptimizations(optimizations, arraysize(optimizations), pass_observer);
@@ -531,7 +531,6 @@ static void RunOptimizations(HGraph* graph,
       graph, stats, "instruction_simplifier_after_bce");
   InstructionSimplifier* simplify4 = new (arena) InstructionSimplifier(
       graph, stats, "instruction_simplifier_before_codegen");
-
   IntrinsicsRecognizer* intrinsics = new (arena) IntrinsicsRecognizer(graph, driver);
 
   HOptimization* optimizations1[] = {
@@ -543,49 +542,30 @@ static void RunOptimizations(HGraph* graph,
     dce1,
     simplify2
   };
-
   RunOptimizations(optimizations1, arraysize(optimizations1), pass_observer);
 
   MaybeRunInliner(graph, codegen, driver, stats, dex_compilation_unit, pass_observer, &handles);
 
-  // TODO: Update passes incompatible with try/catch so we have the same
-  // pipeline for all methods.
-  if (graph->HasTryCatch()) {
-    HOptimization* optimizations2[] = {
-      boolean_simplify,
-      side_effects,
-      gvn,
-      dce2,
-      // The codegen has a few assumptions that only the instruction simplifier
-      // can satisfy. For example, the code generator does not expect to see a
-      // HTypeConversion from a type to the same type.
-      simplify4,
-    };
-
-    RunOptimizations(optimizations2, arraysize(optimizations2), pass_observer);
-  } else {
-    HOptimization* optimizations2[] = {
-      // BooleanSimplifier depends on the InstructionSimplifier removing
-      // redundant suspend checks to recognize empty blocks.
-      boolean_simplify,
-      fold2,  // TODO: if we don't inline we can also skip fold2.
-      side_effects,
-      gvn,
-      licm,
-      induction,
-      bce,
-      fold3,  // evaluates code generated by dynamic bce
-      simplify3,
-      lse,
-      dce2,
-      // The codegen has a few assumptions that only the instruction simplifier
-      // can satisfy. For example, the code generator does not expect to see a
-      // HTypeConversion from a type to the same type.
-      simplify4,
-    };
-
-    RunOptimizations(optimizations2, arraysize(optimizations2), pass_observer);
-  }
+  HOptimization* optimizations2[] = {
+    // BooleanSimplifier depends on the InstructionSimplifier removing
+    // redundant suspend checks to recognize empty blocks.
+    boolean_simplify,
+    fold2,  // TODO: if we don't inline we can also skip fold2.
+    side_effects,
+    gvn,
+    licm,
+    induction,
+    bce,
+    fold3,  // evaluates code generated by dynamic bce
+    simplify3,
+    lse,
+    dce2,
+    // The codegen has a few assumptions that only the instruction simplifier
+    // can satisfy. For example, the code generator does not expect to see a
+    // HTypeConversion from a type to the same type.
+    simplify4,
+  };
+  RunOptimizations(optimizations2, arraysize(optimizations2), pass_observer);
 
   RunArchOptimizations(driver->GetInstructionSet(), graph, stats, pass_observer);
   AllocateRegisters(graph, codegen, pass_observer);
@@ -763,8 +743,8 @@ CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* arena,
       ArtMethod* art_method = compiler_driver->ResolveMethod(
           soa, dex_cache, loader, &dex_compilation_unit, method_idx, invoke_type);
       // We may not get a method, for example if its class is erroneous.
-      // TODO: Clean this up, the compiler driver should just pass the ArtMethod to compile.
       if (art_method != nullptr) {
+        graph->SetArtMethod(art_method);
         interpreter_metadata = art_method->GetQuickenedInfo();
       }
     }
@@ -948,6 +928,7 @@ bool OptimizingCompiler::JitCompile(Thread* self,
   if (stack_map_data == nullptr) {
     return false;
   }
+  MaybeRecordStat(MethodCompilationStat::kCompiled);
  codegen->BuildStackMaps(MemoryRegion(stack_map_data, stack_map_size));
   const void* code = code_cache->CommitCode(
       self,
diff --git a/compiler/optimizing/optimizing_compiler_stats.h b/compiler/optimizing/optimizing_compiler_stats.h
index e5ea0f576b..6296eedfb0 100644
--- a/compiler/optimizing/optimizing_compiler_stats.h
+++ b/compiler/optimizing/optimizing_compiler_stats.h
@@ -49,6 +49,10 @@ enum MethodCompilationStat {
   kNotCompiledUnsupportedIsa,
   kNotCompiledVerificationError,
   kNotCompiledVerifyAtRuntime,
+  kInlinedMonomorphicCall,
+  kMonomorphicCall,
+  kPolymorphicCall,
+  kMegamorphicCall,
   kLastStat
 };
 
@@ -111,6 +115,10 @@ class OptimizingCompilerStats {
       case kNotCompiledUnsupportedIsa : name = "NotCompiledUnsupportedIsa"; break;
       case kNotCompiledVerificationError : name = "NotCompiledVerificationError"; break;
      case kNotCompiledVerifyAtRuntime : name = "NotCompiledVerifyAtRuntime"; break;
+      case kInlinedMonomorphicCall: name = "InlinedMonomorphicCall"; break;
+      case kMonomorphicCall: name = "MonomorphicCall"; break;
+      case kPolymorphicCall: name = "PolymorphicCall"; break;
+      case kMegamorphicCall: name = "MegamorphicCall"; break;
 
       case kLastStat:
         LOG(FATAL) << "invalid stat "
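The enum and the switch that renders it must be kept in sync by hand, including stripping the k prefix from each name (the new kMegamorphicCall entry shows how easy that is to get wrong). One standard way to make such tables drift-proof is an X-macro that derives both declarations from a single list; a sketch, not what this file actually does:

  // One list, expanded twice: once for enumerators, once for display names.
  #define STAT_LIST(M)     \
    M(MonomorphicCall)     \
    M(PolymorphicCall)     \
    M(MegamorphicCall)

  enum MethodCompilationStat {
  #define AS_ENUM(name) k##name,
    STAT_LIST(AS_ENUM)
  #undef AS_ENUM
    kLastStat
  };

  static const char* const kStatNames[] = {
  #define AS_NAME(name) #name,
    STAT_LIST(AS_NAME)
  #undef AS_NAME
  };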
diff --git a/compiler/utils/arm/assembler_arm.h b/compiler/utils/arm/assembler_arm.h
index 4a6e6d7c3f..98a1a8f9a1 100644
--- a/compiler/utils/arm/assembler_arm.h
+++ b/compiler/utils/arm/assembler_arm.h
@@ -22,6 +22,7 @@
 
 #include "base/bit_utils.h"
 #include "base/logging.h"
+#include "base/stl_util.h"
 #include "base/value_object.h"
 #include "constants_arm.h"
 #include "utils/arm/managed_register_arm.h"
@@ -697,10 +698,9 @@ class ArmAssembler : public Assembler {
   // Most of these are pure virtual as they need to be implemented per instruction set.
 
   // Create a new literal with a given value.
-  // NOTE: Force the template parameter to be explicitly specified. In the absence of
-  // std::omit_from_type_deduction<T> or std::identity<T>, use std::decay<T>.
+  // NOTE: Force the template parameter to be explicitly specified.
   template <typename T>
-  Literal* NewLiteral(typename std::decay<T>::type value) {
+  Literal* NewLiteral(typename Identity<T>::type value) {
    static_assert(std::is_integral<T>::value, "T must be an integral type.");
     return NewLiteral(sizeof(value), reinterpret_cast<const uint8_t*>(&value));
   }
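Both the old std::decay<T>::type and the new Identity<T>::type place T in a non-deduced context, which is the whole trick: the compiler cannot infer T from the argument, so every caller must spell the literal's type explicitly. Identity merely states that intent directly, without also stripping cv-qualifiers and references the way decay does. The mechanism in isolation:

  #include <cstdint>
  #include <type_traits>

  // Stand-in for ART's Identity<T> from base/stl_util.h; C++20 spells it std::type_identity.
  template <typename T>
  struct Identity { using type = T; };

  template <typename T>
  void NewLiteral(typename Identity<T>::type value) {  // T is not deducible here.
    static_assert(std::is_integral<T>::value, "T must be an integral type.");
    (void)value;
  }

  int main() {
    NewLiteral<int32_t>(42);   // OK: literal width chosen explicitly.
    // NewLiteral(42);         // Error: no way to deduce T.
  }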
diff --git a/compiler/utils/mips/assembler_mips.cc b/compiler/utils/mips/assembler_mips.cc
index fc7ac7061a..42f21e603d 100644
--- a/compiler/utils/mips/assembler_mips.cc
+++ b/compiler/utils/mips/assembler_mips.cc
@@ -302,6 +302,46 @@ void MipsAssembler::Nor(Register rd, Register rs, Register rt) {
   EmitR(0, rs, rt, rd, 0, 0x27);
 }
 
+void MipsAssembler::Movz(Register rd, Register rs, Register rt) {
+  CHECK(!IsR6());
+  EmitR(0, rs, rt, rd, 0, 0x0A);
+}
+
+void MipsAssembler::Movn(Register rd, Register rs, Register rt) {
+  CHECK(!IsR6());
+  EmitR(0, rs, rt, rd, 0, 0x0B);
+}
+
+void MipsAssembler::Seleqz(Register rd, Register rs, Register rt) {
+  CHECK(IsR6());
+  EmitR(0, rs, rt, rd, 0, 0x35);
+}
+
+void MipsAssembler::Selnez(Register rd, Register rs, Register rt) {
+  CHECK(IsR6());
+  EmitR(0, rs, rt, rd, 0, 0x37);
+}
+
+void MipsAssembler::ClzR6(Register rd, Register rs) {
+  CHECK(IsR6());
+  EmitR(0, rs, static_cast<Register>(0), rd, 0x01, 0x10);
+}
+
+void MipsAssembler::ClzR2(Register rd, Register rs) {
+  CHECK(!IsR6());
+  EmitR(0x1C, rs, rd, rd, 0, 0x20);
+}
+
+void MipsAssembler::CloR6(Register rd, Register rs) {
+  CHECK(IsR6());
+  EmitR(0, rs, static_cast<Register>(0), rd, 0x01, 0x11);
+}
+
+void MipsAssembler::CloR2(Register rd, Register rs) {
+  CHECK(!IsR6());
+  EmitR(0x1C, rs, rd, rd, 0, 0x21);
+}
+
 void MipsAssembler::Seb(Register rd, Register rt) {
   EmitR(0x1f, static_cast<Register>(0), rt, rd, 0x10, 0x20);
 }
@@ -314,6 +354,11 @@ void MipsAssembler::Wsbh(Register rd, Register rt) {
   EmitR(0x1f, static_cast<Register>(0), rt, rd, 2, 0x20);
 }
 
+void MipsAssembler::Bitswap(Register rd, Register rt) {
+  CHECK(IsR6());
+  EmitR(0x1f, static_cast<Register>(0), rt, rd, 0x0, 0x20);
+}
+
 void MipsAssembler::Sll(Register rd, Register rt, int shamt) {
   CHECK(IsUint<5>(shamt)) << shamt;
   EmitR(0, static_cast<Register>(0), rt, rd, shamt, 0x00);
@@ -342,6 +387,10 @@ void MipsAssembler::Srlv(Register rd, Register rt, Register rs) {
   EmitR(0, rs, rt, rd, 0, 0x06);
 }
 
+void MipsAssembler::Rotrv(Register rd, Register rt, Register rs) {
+  EmitR(0, rs, rt, rd, 1, 0x06);
+}
+
 void MipsAssembler::Srav(Register rd, Register rt, Register rs) {
   EmitR(0, rs, rt, rd, 0, 0x07);
 }
diff --git a/compiler/utils/mips/assembler_mips.h b/compiler/utils/mips/assembler_mips.h
index 1ef0992dac..d50b4f698e 100644
--- a/compiler/utils/mips/assembler_mips.h
+++ b/compiler/utils/mips/assembler_mips.h
@@ -133,9 +133,19 @@ class MipsAssembler FINAL : public Assembler {
   void Xori(Register rt, Register rs, uint16_t imm16);
   void Nor(Register rd, Register rs, Register rt);
 
+  void Movz(Register rd, Register rs, Register rt);  // R2
+  void Movn(Register rd, Register rs, Register rt);  // R2
+  void Seleqz(Register rd, Register rs, Register rt);  // R6
+  void Selnez(Register rd, Register rs, Register rt);  // R6
+  void ClzR6(Register rd, Register rs);
+  void ClzR2(Register rd, Register rs);
+  void CloR6(Register rd, Register rs);
+  void CloR2(Register rd, Register rs);
+
   void Seb(Register rd, Register rt);  // R2+
   void Seh(Register rd, Register rt);  // R2+
   void Wsbh(Register rd, Register rt);  // R2+
+  void Bitswap(Register rd, Register rt);  // R6
 
   void Sll(Register rd, Register rt, int shamt);
   void Srl(Register rd, Register rt, int shamt);
@@ -143,6 +153,7 @@ class MipsAssembler FINAL : public Assembler {
   void Sra(Register rd, Register rt, int shamt);
   void Sllv(Register rd, Register rt, Register rs);
   void Srlv(Register rd, Register rt, Register rs);
+  void Rotrv(Register rd, Register rt, Register rs);  // R2+
   void Srav(Register rd, Register rt, Register rs);
 
   void Lb(Register rt, Register rs, uint16_t imm16);
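All of the new emitters are R-format instructions: EmitR packs opcode, rs, rt, rd, shamt and funct into a single 32-bit word, and the CHECK(IsR6())/CHECK(!IsR6()) guards keep revision-specific encodings (Movz/Movn on R2, Seleqz/Selnez on R6) from being emitted for the wrong ISA revision. A self-contained sketch of the packing; the field layout is the architectural one, while the encoder function itself is hypothetical:

  #include <cstdint>

  // MIPS R-format: opcode(6) | rs(5) | rt(5) | rd(5) | shamt(5) | funct(6).
  uint32_t EncodeR(uint32_t opcode, uint32_t rs, uint32_t rt,
                   uint32_t rd, uint32_t shamt, uint32_t funct) {
    return (opcode << 26) | (rs << 21) | (rt << 16) |
           (rd << 11) | (shamt << 6) | funct;
  }

  int main() {
    // movz $v0, $a0, $a1: rd = 2, rs = 4, rt = 5, funct 0x0A -> 0x0085100A.
    uint32_t movz = EncodeR(0, 4, 5, 2, 0, 0x0A);
    return movz == 0x0085100Au ? 0 : 1;
  }

Callers would then pick the R2 or R6 flavor at code-generation time (for example ClzR2 versus ClzR6) based on the target revision, which is exactly what the paired entry points in the header make possible.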