54 files changed, 2144 insertions, 480 deletions
diff --git a/cmdline/cmdline_types.h b/cmdline/cmdline_types.h index 6c0a0e1f4f..97c672c560 100644 --- a/cmdline/cmdline_types.h +++ b/cmdline/cmdline_types.h @@ -616,6 +616,8 @@ struct CmdlineType<LogVerbosity> : CmdlineTypeParser<LogVerbosity> { log_verbosity.threads = true; } else if (verbose_options[j] == "verifier") { log_verbosity.verifier = true; + } else if (verbose_options[j] == "image") { + log_verbosity.image = true; } else { return Result::Usage(std::string("Unknown -verbose option ") + verbose_options[j]); } diff --git a/compiler/driver/compiler_driver.cc b/compiler/driver/compiler_driver.cc index d0215255e8..818d50a994 100644 --- a/compiler/driver/compiler_driver.cc +++ b/compiler/driver/compiler_driver.cc @@ -730,8 +730,8 @@ void CompilerDriver::PreCompile(jobject class_loader, const std::vector<const De bool CompilerDriver::IsImageClass(const char* descriptor) const { if (!IsBootImage()) { - // NOTE: Currently unreachable, all callers check IsImage(). - return false; + // NOTE: Currently only reachable from InitImageMethodVisitor for the app image case. + return true; } else { return image_classes_->find(descriptor) != image_classes_->end(); } diff --git a/compiler/image_test.cc b/compiler/image_test.cc index 12132c0cd0..b8416751c5 100644 --- a/compiler/image_test.cc +++ b/compiler/image_test.cc @@ -155,7 +155,11 @@ void ImageTest::TestWriteRead(ImageHeader::StorageMode storage_mode) { { std::vector<const char*> dup_oat_filename(1, dup_oat->GetPath().c_str()); std::vector<const char*> dup_image_filename(1, image_file.GetFilename().c_str()); - bool success_image = writer->Write(kInvalidImageFd, dup_image_filename, dup_oat_filename); + bool success_image = writer->Write(kInvalidFd, + dup_image_filename, + kInvalidFd, + dup_oat_filename, + dup_oat_filename[0]); ASSERT_TRUE(success_image); bool success_fixup = ElfWriter::Fixup(dup_oat.get(), writer->GetOatDataBegin(dup_oat_filename[0])); @@ -292,11 +296,17 @@ TEST_F(ImageTest, ImageHeaderIsValid) { oat_data_begin, oat_data_end, oat_file_end, + /*boot_image_begin*/0U, + /*boot_image_size*/0U, + /*boot_oat_begin*/0U, + /*boot_oat_size_*/0U, sizeof(void*), /*compile_pic*/false, + /*is_pic*/false, ImageHeader::kDefaultStorageMode, /*data_size*/0u); ASSERT_TRUE(image_header.IsValid()); + ASSERT_TRUE(!image_header.IsAppImage()); char* magic = const_cast<char*>(image_header.GetMagic()); strcpy(magic, ""); // bad magic diff --git a/compiler/image_writer.cc b/compiler/image_writer.cc index d0bb201d69..72c615e4bc 100644 --- a/compiler/image_writer.cc +++ b/compiler/image_writer.cc @@ -159,17 +159,45 @@ bool ImageWriter::PrepareImageAddressSpace() { bool ImageWriter::Write(int image_fd, const std::vector<const char*>& image_filenames, - const std::vector<const char*>& oat_filenames) { + int oat_fd, + const std::vector<const char*>& oat_filenames, + const std::string& oat_location) { + // If image_fd or oat_fd are not kInvalidFd then we may have empty strings in image_filenames or + // oat_filenames. 
CHECK(!image_filenames.empty()); + if (image_fd != kInvalidFd) { + CHECK_EQ(image_filenames.size(), 1u); + } CHECK(!oat_filenames.empty()); + if (oat_fd != kInvalidFd) { + CHECK_EQ(oat_filenames.size(), 1u); + } CHECK_EQ(image_filenames.size(), oat_filenames.size()); size_t oat_file_offset = 0; for (size_t i = 0; i < oat_filenames.size(); ++i) { const char* oat_filename = oat_filenames[i]; - std::unique_ptr<File> oat_file(OS::OpenFileReadWrite(oat_filename)); - if (oat_file.get() == nullptr) { + std::unique_ptr<File> oat_file; + + if (oat_fd != -1) { + if (strlen(oat_filename) == 0u) { + oat_file.reset(new File(oat_fd, false)); + } else { + oat_file.reset(new File(oat_fd, oat_filename, false)); + } + int length = oat_file->GetLength(); + if (length < 0) { + PLOG(ERROR) << "Oat file has negative length " << length; + return false; + } else { + // Leave the fd open since dex2oat still needs to write out the oat file with the fd. + oat_file->DisableAutoClose(); + } + } else { + oat_file.reset(OS::OpenFileReadWrite(oat_filename)); + } + if (oat_file == nullptr) { PLOG(ERROR) << "Failed to open oat file " << oat_filename; return false; } @@ -181,7 +209,7 @@ bool ImageWriter::Write(int image_fd, return false; } Runtime::Current()->GetOatFileManager().RegisterOatFile( - std::unique_ptr<const OatFile>(oat_file_)); + std::unique_ptr<const OatFile>(oat_file_)); const OatHeader& oat_header = oat_file_->GetOatHeader(); ImageInfo& image_info = GetImageInfo(oat_filename); @@ -220,8 +248,15 @@ bool ImageWriter::Write(int image_fd, SetOatChecksumFromElfFile(oat_file.get()); - if (oat_file->FlushCloseOrErase() != 0) { - LOG(ERROR) << "Failed to flush and close oat file " << oat_filename; + if (oat_fd != -1) { + // Leave fd open for caller. + if (oat_file->Flush() != 0) { + LOG(ERROR) << "Failed to flush oat file " << oat_filename << " for " << oat_location; + return false; + } + } else if (oat_file->FlushCloseOrErase() != 0) { + LOG(ERROR) << "Failed to flush and close oat file " << oat_filename + << " for " << oat_location; return false; } } @@ -238,16 +273,22 @@ bool ImageWriter::Write(int image_fd, const char* oat_filename = oat_filenames[i]; ImageInfo& image_info = GetImageInfo(oat_filename); std::unique_ptr<File> image_file; - if (image_fd != kInvalidImageFd) { - image_file.reset(new File(image_fd, image_filename, unix_file::kCheckSafeUsage)); + if (image_fd != kInvalidFd) { + if (strlen(image_filename) == 0u) { + image_file.reset(new File(image_fd, unix_file::kCheckSafeUsage)); + } else { + LOG(ERROR) << "image fd " << image_fd << " name " << image_filename; + } } else { image_file.reset(OS::CreateEmptyFile(image_filename)); } + if (image_file == nullptr) { LOG(ERROR) << "Failed to open image file " << image_filename; return false; } - if (fchmod(image_file->Fd(), 0644) != 0) { + + if (!compile_app_image_ && fchmod(image_file->Fd(), 0644) != 0) { PLOG(ERROR) << "Failed to make image file world readable: " << image_filename; image_file->Erase(); return EXIT_FAILURE; @@ -701,6 +742,7 @@ bool ImageWriter::ContainsBootClassLoaderNonImageClassInternal( std::unordered_set<mirror::Class*>* visited) { DCHECK(early_exit != nullptr); DCHECK(visited != nullptr); + DCHECK(compile_app_image_); if (klass == nullptr) { return false; } @@ -717,6 +759,13 @@ bool ImageWriter::ContainsBootClassLoaderNonImageClassInternal( visited->emplace(klass); bool result = IsBootClassLoaderNonImageClass(klass); bool my_early_exit = false; // Only for ourselves, ignore caller. 
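(ImageWriter::Write() continues below.) The new signature lets the writer receive an already-open descriptor for either output (dex2oat passes app_image_fd_ and oat_fd_ through) and fall back to opening by name, and a caller-supplied fd must stay open after flushing. A minimal standalone sketch of that open-by-fd-or-by-name pattern in plain POSIX, not using ART's File class; the helper name is illustrative only:

    #include <fcntl.h>

    constexpr int kInvalidFd = -1;

    // Returns a writable fd: either the one the caller supplied (which stays owned by the
    // caller, mirroring DisableAutoClose()/Flush() in the patch) or one opened by name.
    int OpenOutput(int supplied_fd, const char* filename, bool* owns_fd) {
      if (supplied_fd != kInvalidFd) {
        *owns_fd = false;  // Borrowed: flush when done, but do not close.
        return supplied_fd;
      }
      *owns_fd = true;     // Opened here: the caller closes (or erases) it itself.
      return open(filename, O_RDWR | O_CREAT, 0644);
    }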
+ // Remove classes that failed to verify since we don't want to have java.lang.VerifyError in the + // app image. + if (klass->GetStatus() == mirror::Class::kStatusError) { + result = true; + } else { + CHECK(klass->GetVerifyError() == nullptr) << PrettyClass(klass); + } if (!result) { // Check interfaces since these wont be visited through VisitReferences.) mirror::IfTable* if_table = klass->GetIfTable(); @@ -727,6 +776,12 @@ bool ImageWriter::ContainsBootClassLoaderNonImageClassInternal( visited); } } + if (klass->IsObjectArrayClass()) { + result = result || ContainsBootClassLoaderNonImageClassInternal( + klass->GetComponentType(), + &my_early_exit, + visited); + } // Check static fields and their classes. size_t num_static_fields = klass->NumReferenceStaticFields(); if (num_static_fields != 0 && klass->IsResolved()) { @@ -780,7 +835,9 @@ bool ImageWriter::KeepClass(Class* klass) { if (compile_app_image_) { // For app images, we need to prune boot loader classes that are not in the boot image since // these may have already been loaded when the app image is loaded. - return !ContainsBootClassLoaderNonImageClass(klass); + // Keep classes in the boot image space since we don't want to re-resolve these. + return Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass) || + !ContainsBootClassLoaderNonImageClass(klass); } std::string temp; return compiler_driver_.IsImageClass(klass->GetDescriptor(&temp)); @@ -843,25 +900,25 @@ void ImageWriter::PruneNonImageClasses() { for (size_t i = 0, num = dex_cache->NumResolvedMethods(); i != num; ++i) { ArtMethod* method = mirror::DexCache::GetElementPtrSize(resolved_methods, i, target_ptr_size_); - if (method != nullptr) { - auto* declaring_class = method->GetDeclaringClass(); - // Miranda methods may be held live by a class which was not an image class but have a - // declaring class which is an image class. Set it to the resolution method to be safe and - // prevent dangling pointers. - if (method->IsMiranda() || !KeepClass(declaring_class)) { - mirror::DexCache::SetElementPtrSize(resolved_methods, - i, - resolution_method, - target_ptr_size_); - } else { - // Check that the class is still in the classes table. - DCHECK(class_linker->ClassInClassTable(declaring_class)) << "Class " - << PrettyClass(declaring_class) << " not in class linker table"; - } + DCHECK(method != nullptr) << "Expected resolution method instead of null method"; + mirror::Class* declaring_class = method->GetDeclaringClass(); + // Miranda methods may be held live by a class which was not an image class but have a + // declaring class which is an image class. Set it to the resolution method to be safe and + // prevent dangling pointers. + if (method->IsMiranda() || !KeepClass(declaring_class)) { + mirror::DexCache::SetElementPtrSize(resolved_methods, + i, + resolution_method, + target_ptr_size_); + } else { + // Check that the class is still in the classes table. 
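ContainsBootClassLoaderNonImageClassInternal above is a reachability walk over the class graph (interfaces, array component types, static-field classes, ...) guarded by a visited set and an early-exit flag so cycles terminate. A much-simplified, self-contained sketch of that shape over a generic node graph; Node and its fields are placeholders, and the early-exit/result-caching interplay of the real code is omitted:

    #include <unordered_set>
    #include <vector>

    struct Node {
      bool excluded = false;      // Stand-in for "boot class loader class not in the boot image".
      std::vector<Node*> edges;   // Stand-in for interfaces, field types, component types, ...
    };

    // Returns true if `node` or anything reachable from it is excluded.
    // The visited set breaks cycles: a node already on the walk contributes nothing new.
    bool ReachesExcluded(Node* node, std::unordered_set<Node*>* visited) {
      if (node == nullptr || visited->count(node) != 0) {
        return false;
      }
      visited->insert(node);
      if (node->excluded) {
        return true;
      }
      for (Node* next : node->edges) {
        if (ReachesExcluded(next, visited)) {
          return true;
        }
      }
      return false;
    }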
+ DCHECK(class_linker->ClassInClassTable(declaring_class)) << "Class " + << PrettyClass(declaring_class) << " not in class linker table"; } } + ArtField** resolved_fields = dex_cache->GetResolvedFields(); for (size_t i = 0; i < dex_cache->NumResolvedFields(); i++) { - ArtField* field = dex_cache->GetResolvedField(i, target_ptr_size_); + ArtField* field = mirror::DexCache::GetElementPtrSize(resolved_fields, i, target_ptr_size_); if (field != nullptr && !KeepClass(field->GetDeclaringClass())) { dex_cache->SetResolvedField(i, nullptr, target_ptr_size_); } @@ -906,6 +963,32 @@ void ImageWriter::DumpImageClasses() { } } +mirror::String* ImageWriter::FindInternedString(mirror::String* string) { + Thread* const self = Thread::Current(); + for (auto& pair : image_info_map_) { + const ImageInfo& image_info = pair.second; + mirror::String* const found = image_info.intern_table_->LookupStrong(self, string); + DCHECK(image_info.intern_table_->LookupWeak(self, string) == nullptr) + << string->ToModifiedUtf8(); + if (found != nullptr) { + return found; + } + } + if (compile_app_image_) { + Runtime* const runtime = Runtime::Current(); + mirror::String* found = runtime->GetInternTable()->LookupStrong(self, string); + // If we found it in the runtime intern table it could either be in the boot image or interned + // during app image compilation. If it was in the boot image return that, otherwise return null + // since it belongs to another image space. + if (found != nullptr && runtime->GetHeap()->ObjectIsInBootImageSpace(found)) { + return found; + } + DCHECK(runtime->GetInternTable()->LookupWeak(self, string) == nullptr) + << string->ToModifiedUtf8(); + } + return nullptr; +} + void ImageWriter::CalculateObjectBinSlots(Object* obj) { DCHECK(obj != nullptr); // if it is a string, we want to intern it if its not interned. @@ -915,13 +998,16 @@ void ImageWriter::CalculateObjectBinSlots(Object* obj) { // we must be an interned string that was forward referenced and already assigned if (IsImageBinSlotAssigned(obj)) { - DCHECK_EQ(obj, image_info.intern_table_->InternStrongImageString(obj->AsString())); + DCHECK_EQ(obj, FindInternedString(obj->AsString())); return; } - // InternImageString allows us to intern while holding the heap bitmap lock. This is safe since - // we are guaranteed to not have GC during image writing. - mirror::String* const interned = image_info.intern_table_->InternStrongImageString( - obj->AsString()); + // Need to check if the string is already interned in another image info so that we don't have + // the intern tables of two different images contain the same string. + mirror::String* interned = FindInternedString(obj->AsString()); + if (interned == nullptr) { + // Not in another image space, insert to our table. + interned = image_info.intern_table_->InternStrongImageString(obj->AsString()); + } if (obj != interned) { if (!IsImageBinSlotAssigned(interned)) { // interned obj is after us, allocate its location early @@ -1066,6 +1152,11 @@ void ImageWriter::WalkFieldsInOrder(mirror::Object* obj) { // Visit and assign offsets for fields and field arrays. auto* as_klass = h_obj->AsClass(); mirror::DexCache* dex_cache = as_klass->GetDexCache(); + DCHECK_NE(klass->GetStatus(), mirror::Class::kStatusError); + if (compile_app_image_) { + // Extra sanity, no boot loader classes should be left! 
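FindInternedString and the CalculateObjectBinSlots change above implement a lookup-everywhere-before-insert rule so that no string ends up in the intern tables of two different images. The same rule in a standalone form, with std::string and std::unordered_set standing in for mirror::String and the per-image intern tables:

    #include <string>
    #include <unordered_set>
    #include <vector>

    // Look the value up in every per-image table first; only if no image owns it yet is it
    // inserted into this image's own table.
    const std::string* FindOrIntern(const std::string& s,
                                    std::vector<std::unordered_set<std::string>>& tables,
                                    size_t my_table) {
      for (const auto& table : tables) {
        auto it = table.find(s);
        if (it != table.end()) {
          return &*it;  // Already interned by some image: reuse that copy.
        }
      }
      return &*tables[my_table].insert(s).first;  // Nowhere yet: this image owns it.
    }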
+ CHECK(!IsBootClassLoaderClass(as_klass)) << PrettyClass(as_klass); + } LengthPrefixedArray<ArtField>* fields[] = { as_klass->GetSFieldsPtr(), as_klass->GetIFieldsPtr(), }; @@ -1405,6 +1496,13 @@ void ImageWriter::CreateHeader(size_t oat_loaded_size, size_t oat_data_offset) { << " Oat data end=" << reinterpret_cast<uintptr_t>(oat_data_end) << " Oat file end=" << reinterpret_cast<uintptr_t>(oat_file_end); } + // Store boot image info for app image so that we can relocate. + uint32_t boot_image_begin = 0; + uint32_t boot_image_end = 0; + uint32_t boot_oat_begin = 0; + uint32_t boot_oat_end = 0; + gc::Heap* const heap = Runtime::Current()->GetHeap(); + heap->GetBootImagesSize(&boot_image_begin, &boot_image_end, &boot_oat_begin, &boot_oat_end); // Create the header, leave 0 for data size since we will fill this in as we are writing the // image. @@ -1417,8 +1515,13 @@ void ImageWriter::CreateHeader(size_t oat_loaded_size, size_t oat_data_offset) { PointerToLowMemUInt32(image_info.oat_data_begin_), PointerToLowMemUInt32(oat_data_end), PointerToLowMemUInt32(oat_file_end), + boot_image_begin, + boot_image_end - boot_image_begin, + boot_oat_begin, + boot_oat_end - boot_oat_begin, target_ptr_size_, compile_pic_, + /*is_pic*/compile_app_image_, image_storage_mode_, /*data_size*/0u); } @@ -1805,13 +1908,14 @@ void ImageWriter::FixupObject(Object* orig, Object* copy) { if (klass == class_linker->GetClassRoot(ClassLinker::kJavaLangDexCache)) { FixupDexCache(down_cast<mirror::DexCache*>(orig), down_cast<mirror::DexCache*>(copy)); } else if (klass->IsClassLoaderClass()) { + mirror::ClassLoader* copy_loader = down_cast<mirror::ClassLoader*>(copy); // If src is a ClassLoader, set the class table to null so that it gets recreated by the // ClassLoader. - down_cast<mirror::ClassLoader*>(copy)->SetClassTable(nullptr); + copy_loader->SetClassTable(nullptr); // Also set allocator to null to be safe. The allocator is created when we create the class // table. We also never expect to unload things in the image since they are held live as // roots. - down_cast<mirror::ClassLoader*>(copy)->SetAllocator(nullptr); + copy_loader->SetAllocator(nullptr); } } FixupVisitor visitor(this, copy); @@ -1896,7 +2000,7 @@ const uint8_t* ImageWriter::GetOatAddress(OatAddress type) const { // If we are compiling an app image, we need to use the stubs of the boot image. if (compile_app_image_) { // Use the current image pointers. - std::vector<gc::space::ImageSpace*> image_spaces = + const std::vector<gc::space::ImageSpace*>& image_spaces = Runtime::Current()->GetHeap()->GetBootImageSpaces(); DCHECK(!image_spaces.empty()); const OatFile* oat_file = image_spaces[0]->GetOatFile(); diff --git a/compiler/image_writer.h b/compiler/image_writer.h index ad690389e9..622eb1985b 100644 --- a/compiler/image_writer.h +++ b/compiler/image_writer.h @@ -49,7 +49,7 @@ class ImageSpace; class ClassTable; -static constexpr int kInvalidImageFd = -1; +static constexpr int kInvalidFd = -1; // Write a Space built during compilation for use during execution. class ImageWriter FINAL { @@ -103,11 +103,15 @@ class ImageWriter FINAL { uint8_t* GetOatFileBegin(const char* oat_filename) const; - // If image_fd is not kInvalidImageFd, then we use that for the file. Otherwise we open + // If image_fd is not kInvalidFd, then we use that for the image file. Otherwise we open // the names in image_filenames. + // If oat_fd is not kInvalidFd, then we use that for the oat file. Otherwise we open + // the names in oat_filenames. 
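CreateHeader() above now records the boot image and boot oat ranges the app image was compiled against (and marks the image PIC). One natural way for a loader to use those fields is to compare them with where the boot image actually ended up and derive relocation deltas; the sketch below shows only that arithmetic, with illustrative names rather than the ImageHeader API:

    #include <cstdint>

    struct ExpectedBootRanges {
      uint32_t image_begin;  // Boot image begin baked into the app image header.
      uint32_t oat_begin;    // Boot oat begin baked into the app image header.
    };

    struct RelocationDeltas {
      int64_t image_delta;   // Add to pointers into the boot image.
      int64_t oat_delta;     // Add to pointers into the boot oat (e.g. entrypoints).
    };

    RelocationDeltas ComputeDeltas(const ExpectedBootRanges& expected,
                                   uint64_t actual_image_begin,
                                   uint64_t actual_oat_begin) {
      return RelocationDeltas{
          static_cast<int64_t>(actual_image_begin) - static_cast<int64_t>(expected.image_begin),
          static_cast<int64_t>(actual_oat_begin) - static_cast<int64_t>(expected.oat_begin)};
    }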
bool Write(int image_fd, const std::vector<const char*>& image_filenames, - const std::vector<const char*>& oat_filenames) + int oat_fd, + const std::vector<const char*>& oat_filenames, + const std::string& oat_location) REQUIRES(!Locks::mutator_lock_); uintptr_t GetOatDataBegin(const char* oat_filename) { @@ -447,6 +451,10 @@ class ImageWriter FINAL { const ImageInfo& GetConstImageInfo(const char* oat_filename) const; const ImageInfo& GetImageInfo(size_t index) const; + // Find an already strong interned string in the other images or in the boot image. Used to + // remove duplicates in the multi image and app image case. + mirror::String* FindInternedString(mirror::String* string) SHARED_REQUIRES(Locks::mutator_lock_); + const CompilerDriver& compiler_driver_; // Beginning target image address for the first image. diff --git a/compiler/oat_writer.cc b/compiler/oat_writer.cc index c74c41f0c9..a5421622b7 100644 --- a/compiler/oat_writer.cc +++ b/compiler/oat_writer.cc @@ -725,8 +725,18 @@ class OatWriter::InitCodeMethodVisitor : public OatDexMethodVisitor { // Deduplicate code arrays if we are not producing debuggable code. bool deduped = false; + MethodReference method_ref(dex_file_, it.GetMemberIndex()); + auto method_lb = writer_->method_offset_map_.map.lower_bound(method_ref); if (debuggable_) { - quick_code_offset = NewQuickCodeOffset(compiled_method, it, thumb_offset); + if (method_lb != writer_->method_offset_map_.map.end() && + !writer_->method_offset_map_.map.key_comp()(method_ref, method_lb->first)) { + // Duplicate methods, we want the same code for both of them so that the oat writer puts + // the same code in both ArtMethods so that we do not get different oat code at runtime. + quick_code_offset = method_lb->second; + deduped = true; + } else { + quick_code_offset = NewQuickCodeOffset(compiled_method, it, thumb_offset); + } } else { auto lb = dedupe_map_.lower_bound(compiled_method); if (lb != dedupe_map_.end() && !dedupe_map_.key_comp()(compiled_method, lb->first)) { @@ -739,14 +749,12 @@ class OatWriter::InitCodeMethodVisitor : public OatDexMethodVisitor { } if (code_size != 0) { - MethodReference method_ref(dex_file_, it.GetMemberIndex()); - auto method_lb = writer_->method_offset_map_.map.lower_bound(method_ref); if (method_lb != writer_->method_offset_map_.map.end() && !writer_->method_offset_map_.map.key_comp()(method_ref, method_lb->first)) { // TODO: Should this be a hard failure? LOG(WARNING) << "Multiple definitions of " << PrettyMethod(method_ref.dex_method_index, *method_ref.dex_file) - << ((method_lb->second != quick_code_offset) ? "; OFFSET MISMATCH" : ""); + << " offsets " << method_lb->second << " " << quick_code_offset; } else { writer_->method_offset_map_.map.PutBefore(method_lb, method_ref, quick_code_offset); } @@ -958,30 +966,40 @@ class OatWriter::InitImageMethodVisitor : public OatDexMethodVisitor { } ClassLinker* linker = Runtime::Current()->GetClassLinker(); - InvokeType invoke_type = it.GetMethodInvokeType(dex_file_->GetClassDef(class_def_index_)); // Unchecked as we hold mutator_lock_ on entry. 
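The InitCodeMethodVisitor hunk in oat_writer.cc below leans on a standard std::map idiom: lower_bound() plus key_comp() either finds the existing entry or yields the exact insertion hint, so duplicate method references are given the same quick-code offset with a single lookup. The idiom in isolation, with a plain std::map and an illustrative key type:

    #include <cstdint>
    #include <map>
    #include <string>
    #include <utility>

    // Stand-in for MethodReference: (dex file location, method index).
    using MethodRef = std::pair<std::string, uint32_t>;

    // Returns the offset already recorded for `ref`, or records `new_offset` and returns it.
    uint32_t GetOrRecordOffset(std::map<MethodRef, uint32_t>& offsets,
                               const MethodRef& ref,
                               uint32_t new_offset) {
      auto lb = offsets.lower_bound(ref);
      if (lb != offsets.end() && !offsets.key_comp()(ref, lb->first)) {
        return lb->second;                    // Duplicate definition: reuse its code offset.
      }
      offsets.insert(lb, {ref, new_offset});  // lb doubles as the insertion hint (cf. PutBefore).
      return new_offset;
    }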
ScopedObjectAccessUnchecked soa(Thread::Current()); StackHandleScope<1> hs(soa.Self()); Handle<mirror::DexCache> dex_cache(hs.NewHandle(linker->FindDexCache( Thread::Current(), *dex_file_))); - ArtMethod* method = linker->ResolveMethod<ClassLinker::kNoICCECheckForCache>( - *dex_file_, - it.GetMemberIndex(), - dex_cache, - ScopedNullHandle<mirror::ClassLoader>(), - nullptr, - invoke_type); - if (method == nullptr) { - LOG(INTERNAL_FATAL) << "Unexpected failure to resolve a method: " - << PrettyMethod(it.GetMemberIndex(), *dex_file_, true); - soa.Self()->AssertPendingException(); - mirror::Throwable* exc = soa.Self()->GetException(); - std::string dump = exc->Dump(); - LOG(FATAL) << dump; - UNREACHABLE(); + ArtMethod* method; + if (writer_->HasBootImage()) { + const InvokeType invoke_type = it.GetMethodInvokeType( + dex_file_->GetClassDef(class_def_index_)); + method = linker->ResolveMethod<ClassLinker::kNoICCECheckForCache>( + *dex_file_, + it.GetMemberIndex(), + dex_cache, + ScopedNullHandle<mirror::ClassLoader>(), + nullptr, + invoke_type); + if (method == nullptr) { + LOG(INTERNAL_FATAL) << "Unexpected failure to resolve a method: " + << PrettyMethod(it.GetMemberIndex(), *dex_file_, true); + soa.Self()->AssertPendingException(); + mirror::Throwable* exc = soa.Self()->GetException(); + std::string dump = exc->Dump(); + LOG(FATAL) << dump; + UNREACHABLE(); + } + } else { + // Should already have been resolved by the compiler, just peek into the dex cache. + // It may not be resolved if the class failed to verify, in this case, don't set the + // entrypoint. This is not fatal since the dex cache will contain a resolution method. + method = dex_cache->GetResolvedMethod(it.GetMemberIndex(), linker->GetImagePointerSize()); } - - if (compiled_method != nullptr && compiled_method->GetQuickCode().size() != 0) { + if (method != nullptr && + compiled_method != nullptr && + compiled_method->GetQuickCode().size() != 0) { method->SetEntryPointFromQuickCompiledCodePtrSize( reinterpret_cast<void*>(offsets.code_offset_), pointer_size_); } @@ -1467,7 +1485,7 @@ size_t OatWriter::InitOatCodeDexFiles(size_t offset) { } while (false) VISIT(InitCodeMethodVisitor); - if (compiler_driver_->IsBootImage()) { + if (HasImage()) { VISIT(InitImageMethodVisitor); } diff --git a/dex2oat/dex2oat.cc b/dex2oat/dex2oat.cc index 918a01b088..86f51e1131 100644 --- a/dex2oat/dex2oat.cc +++ b/dex2oat/dex2oat.cc @@ -553,7 +553,7 @@ class Dex2Oat FINAL { dump_timing_(false), dump_slow_timing_(kIsDebugBuild), swap_fd_(-1), - app_image_fd_(kInvalidImageFd), + app_image_fd_(kInvalidFd), timings_(timings) {} ~Dex2Oat() { @@ -1442,6 +1442,11 @@ class Dex2Oat FINAL { return true; } + // If we need to keep the oat file open for the image writer. + bool ShouldKeepOatFileOpen() const { + return IsImage() && oat_fd_ != kInvalidFd; + } + // Create and invoke the compiler driver. This will compile all the dex files. 
void Compile() { TimingLogger::ScopedTiming t("dex2oat Compile", timings_); @@ -1593,13 +1598,17 @@ class Dex2Oat FINAL { if (IsImage()) { if (app_image_ && image_base_ == 0) { - std::vector<gc::space::ImageSpace*> image_spaces = - Runtime::Current()->GetHeap()->GetBootImageSpaces(); - for (gc::space::ImageSpace* image_space : image_spaces) { + gc::Heap* const heap = Runtime::Current()->GetHeap(); + for (gc::space::ImageSpace* image_space : heap->GetBootImageSpaces()) { image_base_ = std::max(image_base_, RoundUp( reinterpret_cast<uintptr_t>(image_space->GetImageHeader().GetOatFileEnd()), kPageSize)); } + // The non moving space is right after the oat file. Put the preferred app image location + // right after the non moving space so that we ideally get a continuous immune region for + // the GC. + const size_t non_moving_space_capacity = heap->GetNonMovingSpace()->Capacity(); + image_base_ += non_moving_space_capacity; VLOG(compiler) << "App image base=" << reinterpret_cast<void*>(image_base_); } @@ -2139,9 +2148,14 @@ class Dex2Oat FINAL { REQUIRES(!Locks::mutator_lock_) { CHECK(image_writer_ != nullptr); if (!IsBootImage()) { + CHECK(image_filenames_.empty()); image_filenames_.push_back(app_image_file_name_.c_str()); } - if (!image_writer_->Write(app_image_fd_, image_filenames_, oat_filenames_)) { + if (!image_writer_->Write(app_image_fd_, + image_filenames_, + oat_fd_, + oat_filenames_, + oat_location_)) { LOG(ERROR) << "Failure during image file creation"; return false; } @@ -2423,9 +2437,14 @@ static int CompileImage(Dex2Oat& dex2oat) { return EXIT_FAILURE; } - // Close the image oat files. We always expect the output file by name, and it will be - // re-opened from the unstripped name. Note: it's easier to *flush* and close... - if (!dex2oat.FlushCloseOatFiles()) { + // Flush boot.oat. We always expect the output file by name, and it will be re-opened from the + // unstripped name. Do not close the file if we are compiling the image with an oat fd since the + // image writer will require this fd to generate the image. + if (dex2oat.ShouldKeepOatFileOpen()) { + if (!dex2oat.FlushOatFiles()) { + return EXIT_FAILURE; + } + } else if (!dex2oat.FlushCloseOatFiles()) { return EXIT_FAILURE; } diff --git a/patchoat/patchoat.cc b/patchoat/patchoat.cc index d836532ed2..7e4ce917db 100644 --- a/patchoat/patchoat.cc +++ b/patchoat/patchoat.cc @@ -547,6 +547,9 @@ void PatchOat::PatchInternedStrings(const ImageHeader* image_header) { void PatchOat::PatchClassTable(const ImageHeader* image_header) { const auto& section = image_header->GetImageSection(ImageHeader::kSectionClassTable); + if (section.Size() == 0) { + return; + } // Note that we require that ReadFromMemory does not make an internal copy of the elements. // This also relies on visit roots not doing any verification which could fail after we update // the roots to be the image addresses. 
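The image_base_ computation in the dex2oat hunk above is simple enough to state on its own: take the largest boot oat end, round it up to a page, then add the non-moving space capacity so the app image lands right after it (the comment's goal being one contiguous immune region for the GC). A standalone version of that arithmetic:

    #include <algorithm>
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    constexpr uintptr_t kPageSize = 4096;

    constexpr uintptr_t RoundUpToPage(uintptr_t value) {
      return (value + kPageSize - 1) & ~(kPageSize - 1);
    }

    // oat_file_ends: GetOatFileEnd() of each boot image space.
    // non_moving_space_capacity: heap->GetNonMovingSpace()->Capacity() in the real code.
    uintptr_t PreferredAppImageBase(const std::vector<uintptr_t>& oat_file_ends,
                                    size_t non_moving_space_capacity) {
      uintptr_t base = 0;
      for (uintptr_t end : oat_file_ends) {
        base = std::max(base, RoundUpToPage(end));
      }
      return base + non_moving_space_capacity;
    }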
diff --git a/runtime/art_field-inl.h b/runtime/art_field-inl.h index 4166e22daa..3463b0d842 100644 --- a/runtime/art_field-inl.h +++ b/runtime/art_field-inl.h @@ -37,7 +37,7 @@ inline mirror::Class* ArtField::GetDeclaringClass() { GcRootSource gc_root_source(this); mirror::Class* result = declaring_class_.Read(&gc_root_source); DCHECK(result != nullptr); - DCHECK(result->IsLoaded() || result->IsErroneous()); + DCHECK(result->IsLoaded() || result->IsErroneous()) << result->GetStatus(); return result; } @@ -334,6 +334,15 @@ inline void ArtField::VisitRoots(RootVisitorType& visitor) { visitor.VisitRoot(declaring_class_.AddressWithoutBarrier()); } +template <typename Visitor> +inline void ArtField::UpdateObjects(const Visitor& visitor) { + mirror::Class* old_class = DeclaringClassRoot().Read<kWithoutReadBarrier>(); + mirror::Class* new_class = visitor(old_class); + if (old_class != new_class) { + SetDeclaringClass(new_class); + } +} + } // namespace art #endif // ART_RUNTIME_ART_FIELD_INL_H_ diff --git a/runtime/art_field.h b/runtime/art_field.h index a943a34174..ee1ba1fb53 100644 --- a/runtime/art_field.h +++ b/runtime/art_field.h @@ -190,6 +190,11 @@ class ArtField FINAL { return declaring_class_; } + // Update the declaring class with the passed in visitor. Does not use read barrier. + template <typename Visitor> + ALWAYS_INLINE void UpdateObjects(const Visitor& visitor) + SHARED_REQUIRES(Locks::mutator_lock_); + private: mirror::Class* ProxyFindSystemClass(const char* descriptor) SHARED_REQUIRES(Locks::mutator_lock_); diff --git a/runtime/art_method-inl.h b/runtime/art_method-inl.h index a5f5c49068..74eb7227dc 100644 --- a/runtime/art_method-inl.h +++ b/runtime/art_method-inl.h @@ -467,6 +467,43 @@ void ArtMethod::VisitRoots(RootVisitorType& visitor, size_t pointer_size) { } } +template <typename Visitor> +inline void ArtMethod::UpdateObjectsForImageRelocation(const Visitor& visitor) { + mirror::Class* old_class = GetDeclaringClassNoBarrier(); + mirror::Class* new_class = visitor(old_class); + if (old_class != new_class) { + SetDeclaringClass(new_class); + } + ArtMethod** old_methods = GetDexCacheResolvedMethods(sizeof(void*)); + ArtMethod** new_methods = visitor(old_methods); + if (old_methods != new_methods) { + SetDexCacheResolvedMethods(new_methods, sizeof(void*)); + } + GcRoot<mirror::Class>* old_types = GetDexCacheResolvedTypes(sizeof(void*)); + GcRoot<mirror::Class>* new_types = visitor(old_types); + if (old_types != new_types) { + SetDexCacheResolvedTypes(new_types, sizeof(void*)); + } +} + +template <typename Visitor> +inline void ArtMethod::UpdateEntrypoints(const Visitor& visitor) { + if (IsNative()) { + const void* old_native_code = GetEntryPointFromJni(); + const void* new_native_code = visitor(old_native_code); + if (old_native_code != new_native_code) { + SetEntryPointFromJni(new_native_code); + } + } else { + DCHECK(GetEntryPointFromJni() == nullptr); + } + const void* old_code = GetEntryPointFromQuickCompiledCode(); + const void* new_code = visitor(old_code); + if (old_code != new_code) { + SetEntryPointFromQuickCompiledCode(new_code); + } +} + } // namespace art #endif // ART_RUNTIME_ART_METHOD_INL_H_ diff --git a/runtime/art_method.h b/runtime/art_method.h index 0be2fa20ac..440e796f46 100644 --- a/runtime/art_method.h +++ b/runtime/art_method.h @@ -477,6 +477,17 @@ class ArtMethod FINAL { // Returns whether the method has any compiled code, JIT or AOT. 
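The new ArtField/ArtMethod update helpers below all follow one pattern: read the raw pointer without a read barrier, run it through the visitor, and store only if the value actually changed (presumably so memory that needs no relocation is never written). A generic version of that pattern; the Relocator functor is an assumption used purely for illustration:

    #include <cstdint>

    // Stand-in for the relocation visitor: shifts non-null pointers by a fixed delta.
    struct Relocator {
      intptr_t delta;
      template <typename T>
      T* operator()(T* old_ptr) const {
        return old_ptr == nullptr
                   ? nullptr
                   : reinterpret_cast<T*>(reinterpret_cast<intptr_t>(old_ptr) + delta);
      }
    };

    // Store back only when the visitor moved the pointer; unchanged slots are never written.
    template <typename T, typename Visitor>
    void UpdateIfChanged(T** slot, const Visitor& visitor) {
      T* const old_value = *slot;
      T* const new_value = visitor(old_value);
      if (old_value != new_value) {
        *slot = new_value;
      }
    }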
bool HasAnyCompiledCode() SHARED_REQUIRES(Locks::mutator_lock_); + + // Update heap objects and non-entrypoint pointers by the passed in visitor for image relocation. + // Does not use read barrier. + template <typename Visitor> + ALWAYS_INLINE void UpdateObjectsForImageRelocation(const Visitor& visitor) + SHARED_REQUIRES(Locks::mutator_lock_); + + // Update entry points by passing them through the visitor. + template <typename Visitor> + ALWAYS_INLINE void UpdateEntrypoints(const Visitor& visitor); + protected: // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses". // The class we are a part of. diff --git a/runtime/base/logging.h b/runtime/base/logging.h index 115c26073d..de46b0c118 100644 --- a/runtime/base/logging.h +++ b/runtime/base/logging.h @@ -53,6 +53,7 @@ struct LogVerbosity { bool third_party_jni; // Enabled with "-verbose:third-party-jni". bool threads; bool verifier; + bool image; }; // Global log verbosity setting, initialized by InitLogging. diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc index ed833c4335..ff38394038 100644 --- a/runtime/class_linker.cc +++ b/runtime/class_linker.cc @@ -336,6 +336,10 @@ bool ClassLinker::InitWithoutImage(std::vector<std::unique_ptr<const DexFile>> b // Use the pointer size from the runtime since we are probably creating the image. image_pointer_size_ = InstructionSetPointerSize(runtime->GetInstructionSet()); + if (!ValidPointerSize(image_pointer_size_)) { + *error_msg = StringPrintf("Invalid image pointer size: %zu", image_pointer_size_); + return false; + } // java_lang_Class comes first, it's needed for AllocClass // The GC can't handle an object with a null class since we can't get the size of this object. @@ -489,7 +493,7 @@ bool ClassLinker::InitWithoutImage(std::vector<std::unique_ptr<const DexFile>> b return false; } AppendToBootClassPath(self, *dex_file); - opened_dex_files_.push_back(std::move(dex_file)); + boot_dex_files_.push_back(std::move(dex_file)); } // now we can use FindSystemClass @@ -878,6 +882,7 @@ struct TrampolineCheckData { ArtMethod* m; bool error; }; + static void CheckTrampolines(mirror::Object* obj, void* arg) NO_THREAD_SAFETY_ANALYSIS { if (obj->IsClass()) { mirror::Class* klass = obj->AsClass(); @@ -896,8 +901,8 @@ static void CheckTrampolines(mirror::Object* obj, void* arg) NO_THREAD_SAFETY_AN } } -bool ClassLinker::InitFromImage(std::string* error_msg) { - VLOG(startup) << "ClassLinker::InitFromImage entering"; +bool ClassLinker::InitFromBootImage(std::string* error_msg) { + VLOG(startup) << __FUNCTION__ << " entering"; CHECK(!init_done_); Runtime* const runtime = Runtime::Current(); @@ -906,6 +911,21 @@ bool ClassLinker::InitFromImage(std::string* error_msg) { std::vector<gc::space::ImageSpace*> spaces = heap->GetBootImageSpaces(); CHECK(!spaces.empty()); image_pointer_size_ = spaces[0]->GetImageHeader().GetPointerSize(); + if (!ValidPointerSize(image_pointer_size_)) { + *error_msg = StringPrintf("Invalid image pointer size: %zu", image_pointer_size_); + return false; + } + if (!runtime->IsAotCompiler()) { + // Only the Aot compiler supports having an image with a different pointer size than the + // runtime. This happens on the host for compiling 32 bit tests since we use a 64 bit libart + // compiler. We may also use 32 bit dex2oat on a system with 64 bit apps. 
+ if (image_pointer_size_ != sizeof(void*)) { + *error_msg = StringPrintf("Runtime must use current image pointer size: %zu vs %zu", + image_pointer_size_, + sizeof(void*)); + return false; + } + } dex_cache_boot_image_class_lookup_required_ = true; std::vector<const OatFile*> oat_files = runtime->GetOatFileManager().RegisterImageOatFiles(spaces); @@ -957,19 +977,10 @@ bool ClassLinker::InitFromImage(std::string* error_msg) { } } - StackHandleScopeCollection handles(self); - std::vector<Handle<mirror::ObjectArray<mirror::DexCache>>> dex_caches_vector; - for (gc::space::ImageSpace* space : spaces) { - Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches(handles.NewHandle( - space->GetImageHeader().GetImageRoot(ImageHeader::kDexCaches)-> - AsObjectArray<mirror::DexCache>())); - dex_caches_vector.push_back(dex_caches); - } - - Handle<mirror::ObjectArray<mirror::Class>> class_roots(handles.NewHandle( - spaces[0]->GetImageHeader().GetImageRoot(ImageHeader::kClassRoots)-> - AsObjectArray<mirror::Class>())); - class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>(class_roots.Get()); + class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>( + down_cast<mirror::ObjectArray<mirror::Class>*>( + spaces[0]->GetImageHeader().GetImageRoot(ImageHeader::kClassRoots))); + mirror::Class::SetClassClass(class_roots_.Read()->Get(kJavaLangClass)); // Special case of setting up the String class early so that we can test arbitrary objects // as being Strings or not @@ -982,162 +993,595 @@ bool ClassLinker::InitFromImage(std::string* error_msg) { runtime->SetSentinel(heap->AllocNonMovableObject<true>( self, java_lang_Object, java_lang_Object->GetObjectSize(), VoidFunctor())); - uint32_t dex_file_count = 0; - for (const OatFile* oat_file : oat_files) { - dex_file_count += oat_file->GetOatHeader().GetDexFileCount(); + // reinit array_iftable_ from any array class instance, they should be == + array_iftable_ = GcRoot<mirror::IfTable>(GetClassRoot(kObjectArrayClass)->GetIfTable()); + DCHECK_EQ(array_iftable_.Read(), GetClassRoot(kBooleanArrayClass)->GetIfTable()); + // String class root was set above + mirror::Field::SetClass(GetClassRoot(kJavaLangReflectField)); + mirror::Field::SetArrayClass(GetClassRoot(kJavaLangReflectFieldArrayClass)); + mirror::Constructor::SetClass(GetClassRoot(kJavaLangReflectConstructor)); + mirror::Constructor::SetArrayClass(GetClassRoot(kJavaLangReflectConstructorArrayClass)); + mirror::Method::SetClass(GetClassRoot(kJavaLangReflectMethod)); + mirror::Method::SetArrayClass(GetClassRoot(kJavaLangReflectMethodArrayClass)); + mirror::Reference::SetClass(GetClassRoot(kJavaLangRefReference)); + mirror::BooleanArray::SetArrayClass(GetClassRoot(kBooleanArrayClass)); + mirror::ByteArray::SetArrayClass(GetClassRoot(kByteArrayClass)); + mirror::CharArray::SetArrayClass(GetClassRoot(kCharArrayClass)); + mirror::DoubleArray::SetArrayClass(GetClassRoot(kDoubleArrayClass)); + mirror::FloatArray::SetArrayClass(GetClassRoot(kFloatArrayClass)); + mirror::IntArray::SetArrayClass(GetClassRoot(kIntArrayClass)); + mirror::LongArray::SetArrayClass(GetClassRoot(kLongArrayClass)); + mirror::ShortArray::SetArrayClass(GetClassRoot(kShortArrayClass)); + mirror::Throwable::SetClass(GetClassRoot(kJavaLangThrowable)); + mirror::StackTraceElement::SetClass(GetClassRoot(kJavaLangStackTraceElement)); + + for (gc::space::ImageSpace* image_space : spaces) { + // Boot class loader, use a null handle. 
+ std::vector<std::unique_ptr<const DexFile>> dex_files; + if (!AddImageSpace(image_space, + ScopedNullHandle<mirror::ClassLoader>(), + /*dex_elements*/nullptr, + /*dex_location*/nullptr, + /*out*/&dex_files, + error_msg)) { + return false; + } + // Append opened dex files at the end. + boot_dex_files_.insert(boot_dex_files_.end(), + std::make_move_iterator(dex_files.begin()), + std::make_move_iterator(dex_files.end())); } - uint32_t dex_caches_count = 0; - for (auto dex_caches : dex_caches_vector) { - dex_caches_count += dex_caches->GetLength(); + FinishInit(self); + + VLOG(startup) << __FUNCTION__ << " exiting"; + return true; +} + +static bool IsBootClassLoader(ScopedObjectAccessAlreadyRunnable& soa, + mirror::ClassLoader* class_loader) + SHARED_REQUIRES(Locks::mutator_lock_) { + return class_loader == nullptr || + class_loader->GetClass() == + soa.Decode<mirror::Class*>(WellKnownClasses::java_lang_BootClassLoader); +} + +static mirror::String* GetDexPathListElementName(ScopedObjectAccessUnchecked& soa, + mirror::Object* element) + SHARED_REQUIRES(Locks::mutator_lock_) { + ArtField* const dex_file_field = + soa.DecodeField(WellKnownClasses::dalvik_system_DexPathList__Element_dexFile); + ArtField* const dex_file_name_field = + soa.DecodeField(WellKnownClasses::dalvik_system_DexFile_fileName); + DCHECK(dex_file_field != nullptr); + DCHECK(dex_file_name_field != nullptr); + DCHECK(element != nullptr); + CHECK_EQ(dex_file_field->GetDeclaringClass(), element->GetClass()) << PrettyTypeOf(element); + mirror::Object* dex_file = dex_file_field->GetObject(element); + if (dex_file == nullptr) { + return nullptr; } - if (dex_file_count != dex_caches_count) { - *error_msg = "Dex cache count and dex file count mismatch while trying to initialize from " - "image"; - return false; + mirror::Object* const name_object = dex_file_name_field->GetObject(dex_file); + if (name_object != nullptr) { + return name_object->AsString(); } - for (auto dex_caches : dex_caches_vector) { - for (int32_t i = 0; i < dex_caches->GetLength(); i++) { - StackHandleScope<1> hs2(self); - Handle<mirror::DexCache> dex_cache(hs2.NewHandle(dex_caches->Get(i))); - const std::string& dex_file_location(dex_cache->GetLocation()->ToModifiedUtf8()); - const OatFile::OatDexFile* oat_dex_file = nullptr; - for (const OatFile* oat_file : oat_files) { - const OatFile::OatDexFile* oat_dex = - oat_file->GetOatDexFile(dex_file_location.c_str(), nullptr, false); - if (oat_dex != nullptr) { - DCHECK(oat_dex_file == nullptr); - oat_dex_file = oat_dex; + return nullptr; +} + +static bool FlattenPathClassLoader(mirror::ClassLoader* class_loader, + std::list<mirror::String*>* out_dex_file_names, + std::string* error_msg) + SHARED_REQUIRES(Locks::mutator_lock_) { + DCHECK(out_dex_file_names != nullptr); + DCHECK(error_msg != nullptr); + ScopedObjectAccessUnchecked soa(Thread::Current()); + ArtField* const dex_path_list_field = + soa.DecodeField(WellKnownClasses::dalvik_system_PathClassLoader_pathList); + ArtField* const dex_elements_field = + soa.DecodeField(WellKnownClasses::dalvik_system_DexPathList_dexElements); + CHECK(dex_path_list_field != nullptr); + CHECK(dex_elements_field != nullptr); + while (!IsBootClassLoader(soa, class_loader)) { + if (class_loader->GetClass() != + soa.Decode<mirror::Class*>(WellKnownClasses::dalvik_system_PathClassLoader)) { + *error_msg = StringPrintf("Unknown class loader type %s", PrettyTypeOf(class_loader).c_str()); + // Unsupported class loader. 
+ return false; + } + mirror::Object* dex_path_list = dex_path_list_field->GetObject(class_loader); + if (dex_path_list != nullptr) { + // DexPathList has an array dexElements of Elements[] which each contain a dex file. + mirror::Object* dex_elements_obj = dex_elements_field->GetObject(dex_path_list); + // Loop through each dalvik.system.DexPathList$Element's dalvik.system.DexFile and look + // at the mCookie which is a DexFile vector. + if (dex_elements_obj != nullptr) { + mirror::ObjectArray<mirror::Object>* dex_elements = + dex_elements_obj->AsObjectArray<mirror::Object>(); + // Reverse order since we insert the parent at the front. + for (int32_t i = dex_elements->GetLength() - 1; i >= 0; --i) { + mirror::Object* const element = dex_elements->GetWithoutChecks(i); + if (element == nullptr) { + *error_msg = StringPrintf("Null dex element at index %d", i); + return false; + } + mirror::String* const name = GetDexPathListElementName(soa, element); + if (name == nullptr) { + *error_msg = StringPrintf("Null name for dex element at index %d", i); + return false; + } + out_dex_file_names->push_front(name); } } + } + class_loader = class_loader->GetParent(); + } + return true; +} - if (oat_dex_file == nullptr) { - *error_msg = StringPrintf("Failed finding oat dex file for %s", - dex_file_location.c_str()); - return false; +class FixupArtMethodArrayVisitor : public ArtMethodVisitor { + public: + explicit FixupArtMethodArrayVisitor(const ImageHeader& header) : header_(header) {} + + virtual void Visit(ArtMethod* method) SHARED_REQUIRES(Locks::mutator_lock_) { + GcRoot<mirror::Class>* resolved_types = method->GetDexCacheResolvedTypes(sizeof(void*)); + const bool is_miranda = method->IsMiranda(); + if (resolved_types != nullptr) { + bool in_image_space = false; + if (kIsDebugBuild || is_miranda) { + in_image_space = header_.GetImageSection(ImageHeader::kSectionDexCacheArrays).Contains( + reinterpret_cast<const uint8_t*>(resolved_types) - header_.GetImageBegin()); } - std::string inner_error_msg; - std::unique_ptr<const DexFile> dex_file = oat_dex_file->OpenDexFile(&inner_error_msg); - if (dex_file == nullptr) { - *error_msg = StringPrintf("Failed to open dex file %s error '%s'", - dex_file_location.c_str(), - inner_error_msg.c_str()); - return false; + // Must be in image space for non-miranda method. + DCHECK(is_miranda || in_image_space) + << resolved_types << " is not in image starting at " + << reinterpret_cast<void*>(header_.GetImageBegin()); + if (!is_miranda || in_image_space) { + // Go through the array so that we don't need to do a slow map lookup. + method->SetDexCacheResolvedTypes(*reinterpret_cast<GcRoot<mirror::Class>**>(resolved_types), + sizeof(void*)); } - - if (kSanityCheckObjects) { - SanityCheckArtMethodPointerArray(dex_cache->GetResolvedMethods(), - dex_cache->NumResolvedMethods(), - image_pointer_size_, - spaces); + } + ArtMethod** resolved_methods = method->GetDexCacheResolvedMethods(sizeof(void*)); + if (resolved_methods != nullptr) { + bool in_image_space = false; + if (kIsDebugBuild || is_miranda) { + in_image_space = header_.GetImageSection(ImageHeader::kSectionDexCacheArrays).Contains( + reinterpret_cast<const uint8_t*>(resolved_methods) - header_.GetImageBegin()); + } + // Must be in image space for non-miranda method. 
+ DCHECK(is_miranda || in_image_space) + << resolved_methods << " is not in image starting at " + << reinterpret_cast<void*>(header_.GetImageBegin()); + if (!is_miranda || in_image_space) { + // Go through the array so that we don't need to do a slow map lookup. + method->SetDexCacheResolvedMethods(*reinterpret_cast<ArtMethod***>(resolved_methods), + sizeof(void*)); } + } + } - if (dex_file->GetLocationChecksum() != oat_dex_file->GetDexFileLocationChecksum()) { - *error_msg = StringPrintf("Checksums do not match for %s: %x vs %x", - dex_file_location.c_str(), - dex_file->GetLocationChecksum(), - oat_dex_file->GetDexFileLocationChecksum()); - return false; + private: + const ImageHeader& header_; +}; + +class VerifyClassInTableArtMethodVisitor : public ArtMethodVisitor { + public: + explicit VerifyClassInTableArtMethodVisitor(ClassTable* table) : table_(table) {} + + virtual void Visit(ArtMethod* method) + SHARED_REQUIRES(Locks::mutator_lock_, Locks::classlinker_classes_lock_) { + mirror::Class* klass = method->GetDeclaringClass(); + if (klass != nullptr && !Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass)) { + CHECK_EQ(table_->LookupByDescriptor(klass), klass) << PrettyClass(klass); + } + } + + private: + ClassTable* const table_; +}; + +void ClassLinker::UpdateAppImageClassLoadersAndDexCaches( + gc::space::ImageSpace* space, + Handle<mirror::ClassLoader> class_loader, + Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches, + bool added_class_table) { + Thread* const self = Thread::Current(); + gc::Heap* const heap = Runtime::Current()->GetHeap(); + const ImageHeader& header = space->GetImageHeader(); + // Add image classes into the class table for the class loader, and fixup the dex caches and + // class loader fields. + WriterMutexLock mu(self, *Locks::classlinker_classes_lock_); + ClassTable* table = InsertClassTableForClassLoader(class_loader.Get()); + // TODO: Store class table in the image to avoid manually adding the classes. + for (int32_t i = 0, num_dex_caches = dex_caches->GetLength(); i < num_dex_caches; i++) { + mirror::DexCache* const dex_cache = dex_caches->Get(i); + const DexFile* const dex_file = dex_cache->GetDexFile(); + // If the oat file expects the dex cache arrays to be in the BSS, then allocate there and + // copy over the arrays. + DCHECK(dex_file != nullptr); + const size_t num_strings = dex_file->NumStringIds(); + const size_t num_types = dex_file->NumTypeIds(); + const size_t num_methods = dex_file->NumMethodIds(); + const size_t num_fields = dex_file->NumFieldIds(); + CHECK_EQ(num_strings, dex_cache->NumStrings()); + CHECK_EQ(num_types, dex_cache->NumResolvedTypes()); + CHECK_EQ(num_methods, dex_cache->NumResolvedMethods()); + CHECK_EQ(num_fields, dex_cache->NumResolvedFields()); + if (dex_file->GetOatDexFile() != nullptr && + dex_file->GetOatDexFile()->GetDexCacheArrays() != nullptr) { + DexCacheArraysLayout layout(image_pointer_size_, dex_file); + uint8_t* const raw_arrays = dex_file->GetOatDexFile()->GetDexCacheArrays(); + // The space is not yet visible to the GC, we can avoid the read barriers and use + // std::copy_n. 
+ if (num_strings != 0u) { + GcRoot<mirror::String>* const strings = + reinterpret_cast<GcRoot<mirror::String>*>(raw_arrays + layout.StringsOffset()); + for (size_t j = 0; kIsDebugBuild && j < num_strings; ++j) { + DCHECK(strings[j].IsNull()); + } + std::copy_n(dex_cache->GetStrings(), num_strings, strings); + dex_cache->SetStrings(strings); } - AppendToBootClassPath(*dex_file.get(), dex_cache); - opened_dex_files_.push_back(std::move(dex_file)); + if (num_types != 0u) { + GcRoot<mirror::Class>* const image_resolved_types = dex_cache->GetResolvedTypes(); + GcRoot<mirror::Class>* const types = + reinterpret_cast<GcRoot<mirror::Class>*>(raw_arrays + layout.TypesOffset()); + for (size_t j = 0; kIsDebugBuild && j < num_types; ++j) { + DCHECK(types[j].IsNull()); + } + std::copy_n(image_resolved_types, num_types, types); + // Store a pointer to the new location for fast ArtMethod patching without requiring map. + // This leaves random garbage at the start of the dex cache array, but nobody should ever + // read from it again. + *reinterpret_cast<GcRoot<mirror::Class>**>(image_resolved_types) = types; + dex_cache->SetResolvedTypes(types); + } + if (num_methods != 0u) { + ArtMethod** const methods = reinterpret_cast<ArtMethod**>( + raw_arrays + layout.MethodsOffset()); + ArtMethod** const image_resolved_methods = dex_cache->GetResolvedMethods(); + for (size_t j = 0; kIsDebugBuild && j < num_methods; ++j) { + DCHECK(methods[j] == nullptr); + } + std::copy_n(image_resolved_methods, num_methods, methods); + // Store a pointer to the new location for fast ArtMethod patching without requiring map. + *reinterpret_cast<ArtMethod***>(image_resolved_methods) = methods; + dex_cache->SetResolvedMethods(methods); + } + if (num_fields != 0u) { + ArtField** const fields = reinterpret_cast<ArtField**>(raw_arrays + layout.FieldsOffset()); + for (size_t j = 0; kIsDebugBuild && j < num_fields; ++j) { + DCHECK(fields[j] == nullptr); + } + std::copy_n(dex_cache->GetResolvedFields(), num_fields, fields); + dex_cache->SetResolvedFields(fields); + } } + { + WriterMutexLock mu2(self, dex_lock_); + // Make sure to do this after we update the arrays since we store the resolved types array + // in DexCacheData in RegisterDexFileLocked. We need the array pointer to be the one in the + // BSS. + mirror::DexCache* existing_dex_cache = FindDexCacheLocked(self, + *dex_file, + /*allow_failure*/true); + CHECK(existing_dex_cache == nullptr); + StackHandleScope<1> hs3(self); + RegisterDexFileLocked(*dex_file, hs3.NewHandle(dex_cache)); + } + GcRoot<mirror::Class>* const types = dex_cache->GetResolvedTypes(); + if (!added_class_table) { + for (int32_t j = 0; j < static_cast<int32_t>(num_types); j++) { + // The image space is not yet added to the heap, avoid read barriers. + mirror::Class* klass = types[j].Read<kWithoutReadBarrier>(); + if (klass != nullptr) { + DCHECK_NE(klass->GetStatus(), mirror::Class::kStatusError); + // Update the class loader from the one in the image class loader to the one that loaded + // the app image. + klass->SetClassLoader(class_loader.Get()); + // If there are multiple dex caches, there may be the same class multiple times + // in different dex caches. Check for this since inserting will add duplicates + // otherwise. 
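The dex-cache array relocation above uses a small trick worth spelling out: after copying an array into its .bss slot with std::copy_n, it stores a pointer to the new array at the start of the old one, so FixupArtMethodArrayVisitor can later chase that forwarding pointer instead of doing a slow map lookup. A self-contained sketch of the copy-plus-forwarding-pointer step, with simplified types; it assumes the element type is at least pointer sized, as the GcRoot/pointer arrays here are:

    #include <algorithm>
    #include <cstddef>

    // Copy `count` entries into the new array and leave a forwarding pointer at the start of
    // the old array. Only safe while nothing reads the old array as data anymore; in the
    // patch the space is not yet visible to the GC when this happens.
    template <typename T>
    T* RelocateArray(T* old_array, T* new_array, size_t count) {
      static_assert(sizeof(T) >= sizeof(T*), "need room for the forwarding pointer");
      std::copy_n(old_array, count, new_array);
      *reinterpret_cast<T**>(old_array) = new_array;  // Later fixups follow this pointer.
      return new_array;
    }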
+ if (num_dex_caches > 1) { + mirror::Class* existing = table->LookupByDescriptor(klass); + if (existing != nullptr) { + DCHECK_EQ(existing, klass) << PrettyClass(klass); + } else { + table->Insert(klass); + } + } else { + table->Insert(klass); + } + // Double checked VLOG to avoid overhead. + if (VLOG_IS_ON(image)) { + VLOG(image) << PrettyClass(klass) << " " << klass->GetStatus(); + if (!klass->IsArrayClass()) { + VLOG(image) << "From " << klass->GetDexCache()->GetDexFile()->GetBaseLocation(); + } + VLOG(image) << "Direct methods"; + for (ArtMethod& m : klass->GetDirectMethods(sizeof(void*))) { + VLOG(image) << PrettyMethod(&m); + } + VLOG(image) << "Virtual methods"; + for (ArtMethod& m : klass->GetVirtualMethods(sizeof(void*))) { + VLOG(image) << PrettyMethod(&m); + } + } + } + } + } + if (kIsDebugBuild) { + for (int32_t j = 0; j < static_cast<int32_t>(num_types); j++) { + // The image space is not yet added to the heap, avoid read barriers. + mirror::Class* klass = types[j].Read<kWithoutReadBarrier>(); + if (klass != nullptr) { + DCHECK_NE(klass->GetStatus(), mirror::Class::kStatusError); + if (kIsDebugBuild) { + DCHECK_EQ(table->LookupByDescriptor(klass), klass); + mirror::Class* super_class = klass->GetSuperClass(); + if (super_class != nullptr && !heap->ObjectIsInBootImageSpace(super_class)) { + CHECK_EQ(table->LookupByDescriptor(super_class), super_class); + } + } + DCHECK_EQ(klass->GetClassLoader(), class_loader.Get()); + if (kIsDebugBuild) { + for (ArtMethod& m : klass->GetDirectMethods(sizeof(void*))) { + const void* code = m.GetEntryPointFromQuickCompiledCode(); + const void* oat_code = m.IsInvokable() ? GetQuickOatCodeFor(&m) : code; + if (!IsQuickResolutionStub(code) && + !IsQuickGenericJniStub(code) && + !IsQuickToInterpreterBridge(code) && + !m.IsNative()) { + DCHECK_EQ(code, oat_code) << PrettyMethod(&m); + } + } + VLOG(image) << "Virtual methods"; + for (ArtMethod& m : klass->GetVirtualMethods(sizeof(void*))) { + const void* code = m.GetEntryPointFromQuickCompiledCode(); + const void* oat_code = m.IsInvokable() ? 
GetQuickOatCodeFor(&m) : code; + if (!IsQuickResolutionStub(code) && + !IsQuickGenericJniStub(code) && + !IsQuickToInterpreterBridge(code) && + !m.IsNative()) { + DCHECK_EQ(code, oat_code) << PrettyMethod(&m); + } + } + } + } + } + } + } + { + FixupArtMethodArrayVisitor visitor(header); + header.GetImageSection(ImageHeader::kSectionArtMethods).VisitPackedArtMethods( + &visitor, space->Begin(), sizeof(void*)); + Runtime::Current()->GetHeap()->WriteBarrierEveryFieldOf(class_loader.Get()); + } + if (kIsDebugBuild) { + ClassTable* const class_table = class_loader.Get()->GetClassTable(); + VerifyClassInTableArtMethodVisitor visitor2(class_table); + header.GetImageSection(ImageHeader::kSectionArtMethods).VisitPackedArtMethods( + &visitor2, space->Begin(), sizeof(void*)); } +} - if (!ValidPointerSize(image_pointer_size_)) { - *error_msg = StringPrintf("Invalid image pointer size: %zu", image_pointer_size_); +bool ClassLinker::AddImageSpace( + gc::space::ImageSpace* space, + Handle<mirror::ClassLoader> class_loader, + jobjectArray dex_elements, + const char* dex_location, + std::vector<std::unique_ptr<const DexFile>>* out_dex_files, + std::string* error_msg) { + DCHECK(out_dex_files != nullptr); + DCHECK(error_msg != nullptr); + const uint64_t start_time = NanoTime(); + const bool app_image = class_loader.Get() != nullptr; + const ImageHeader& header = space->GetImageHeader(); + mirror::Object* dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches); + DCHECK(dex_caches_object != nullptr); + Runtime* const runtime = Runtime::Current(); + gc::Heap* const heap = runtime->GetHeap(); + Thread* const self = Thread::Current(); + StackHandleScope<2> hs(self); + Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches( + hs.NewHandle(dex_caches_object->AsObjectArray<mirror::DexCache>())); + Handle<mirror::ObjectArray<mirror::Class>> class_roots(hs.NewHandle( + header.GetImageRoot(ImageHeader::kClassRoots)->AsObjectArray<mirror::Class>())); + const OatFile* oat_file = space->GetOatFile(); + std::unordered_set<mirror::ClassLoader*> image_class_loaders; + // Check that the image is what we are expecting. + if (image_pointer_size_ != space->GetImageHeader().GetPointerSize()) { + *error_msg = StringPrintf("Application image pointer size does not match runtime: %zu vs %zu", + static_cast<size_t>(space->GetImageHeader().GetPointerSize()), + image_pointer_size_); + return false; + } + DCHECK(class_roots.Get() != nullptr); + if (class_roots->GetLength() != static_cast<int32_t>(kClassRootsMax)) { + *error_msg = StringPrintf("Expected %d class roots but got %d", + class_roots->GetLength(), + static_cast<int32_t>(kClassRootsMax)); + return false; + } + // Check against existing class roots to make sure they match the ones in the boot image. + for (size_t i = 0; i < kClassRootsMax; i++) { + if (class_roots->Get(i) != GetClassRoot(static_cast<ClassRoot>(i))) { + *error_msg = "App image class roots must have pointer equality with runtime ones."; + return false; + } + } + if (oat_file->GetOatHeader().GetDexFileCount() != + static_cast<uint32_t>(dex_caches->GetLength())) { + *error_msg = "Dex cache count and dex file count mismatch while trying to initialize from " + "image"; return false; } - // Set classes on AbstractMethod early so that IsMethod tests can be performed during the live - // bitmap walk. - if (!runtime->IsAotCompiler()) { - // Only the Aot compiler supports having an image with a different pointer size than the - // runtime. 
This happens on the host for compile 32 bit tests since we use a 64 bit libart - // compiler. We may also use 32 bit dex2oat on a system with 64 bit apps. - if (image_pointer_size_ != sizeof(void*)) { - *error_msg = StringPrintf("Runtime must use current image pointer size: %zu vs %zu", - image_pointer_size_ , - sizeof(void*)); + StackHandleScope<1> hs2(self); + MutableHandle<mirror::DexCache> h_dex_cache(hs2.NewHandle<mirror::DexCache>(nullptr)); + for (int32_t i = 0; i < dex_caches->GetLength(); i++) { + h_dex_cache.Assign(dex_caches->Get(i)); + std::string dex_file_location(h_dex_cache->GetLocation()->ToModifiedUtf8()); + // TODO: Only store qualified paths. + // If non qualified, qualify it. + if (dex_file_location.find('/') == std::string::npos) { + std::string dex_location_path = dex_location; + const size_t pos = dex_location_path.find_last_of('/'); + CHECK_NE(pos, std::string::npos); + dex_location_path = dex_location_path.substr(0, pos + 1); // Keep trailing '/' + dex_file_location = dex_location_path + dex_file_location; + } + const OatFile::OatDexFile* oat_dex_file = oat_file->GetOatDexFile(dex_file_location.c_str(), + nullptr); + if (oat_dex_file == nullptr) { + *error_msg = StringPrintf("Failed finding oat dex file for %s %s", + oat_file->GetLocation().c_str(), + dex_file_location.c_str()); + return false; + } + std::string inner_error_msg; + std::unique_ptr<const DexFile> dex_file = oat_dex_file->OpenDexFile(&inner_error_msg); + if (dex_file == nullptr) { + *error_msg = StringPrintf("Failed to open dex file %s from within oat file %s error '%s'", + dex_file_location.c_str(), + oat_file->GetLocation().c_str(), + inner_error_msg.c_str()); return false; } + + if (dex_file->GetLocationChecksum() != oat_dex_file->GetDexFileLocationChecksum()) { + *error_msg = StringPrintf("Checksums do not match for %s: %x vs %x", + dex_file_location.c_str(), + dex_file->GetLocationChecksum(), + oat_dex_file->GetDexFileLocationChecksum()); + return false; + } + + if (app_image) { + // The current dex file field is bogus, overwrite it so that we can get the dex file in the + // loop below. + h_dex_cache->SetDexFile(dex_file.get()); + // Check that each class loader resolved the same way. + // TODO: Store image class loaders as image roots. + GcRoot<mirror::Class>* const types = h_dex_cache->GetResolvedTypes(); + for (int32_t j = 0, num_types = h_dex_cache->NumResolvedTypes(); j < num_types; j++) { + mirror::Class* klass = types[j].Read(); + if (klass != nullptr) { + DCHECK_NE(klass->GetStatus(), mirror::Class::kStatusError); + mirror::ClassLoader* image_class_loader = klass->GetClassLoader(); + image_class_loaders.insert(image_class_loader); + } + } + } else { + if (kSanityCheckObjects) { + SanityCheckArtMethodPointerArray(h_dex_cache->GetResolvedMethods(), + h_dex_cache->NumResolvedMethods(), + image_pointer_size_, + heap->GetBootImageSpaces()); + } + // Register dex files, keep track of existing ones that are conflicts. + AppendToBootClassPath(*dex_file.get(), h_dex_cache); + } + out_dex_files->push_back(std::move(dex_file)); + } + + if (app_image) { + ScopedObjectAccessUnchecked soa(Thread::Current()); + // Check that the class loader resolves the same way as the ones in the image. + // Image class loader [A][B][C][image dex files] + // Class loader = [???][dex_elements][image dex files] + // Need to ensure that [???][dex_elements] == [A][B][C]. + // For each class loader, PathClassLoader, the laoder checks the parent first. 
Also the logic + // for PathClassLoader does this by looping through the array of dex files. To ensure they + // resolve the same way, simply flatten the hierarchy in the way the resolution order would be, + // and check that the dex file names are the same. + for (mirror::ClassLoader* image_class_loader : image_class_loaders) { + std::list<mirror::String*> image_dex_file_names; + std::string temp_error_msg; + if (!FlattenPathClassLoader(image_class_loader, &image_dex_file_names, &temp_error_msg)) { + *error_msg = StringPrintf("Failed to flatten image class loader hierarchy '%s'", + temp_error_msg.c_str()); + return false; + } + std::list<mirror::String*> loader_dex_file_names; + if (!FlattenPathClassLoader(class_loader.Get(), &loader_dex_file_names, &temp_error_msg)) { + *error_msg = StringPrintf("Failed to flatten class loader hierarchy '%s'", + temp_error_msg.c_str()); + return false; + } + // Add the temporary dex path list elements at the end. + auto* elements = soa.Decode<mirror::ObjectArray<mirror::Object>*>(dex_elements); + for (size_t i = 0, num_elems = elements->GetLength(); i < num_elems; ++i) { + mirror::Object* element = elements->GetWithoutChecks(i); + if (element != nullptr) { + // If we are somewhere in the middle of the array, there may be nulls at the end. + loader_dex_file_names.push_back(GetDexPathListElementName(soa, element)); + } + } + // Ignore the number of image dex files since we are adding those to the class loader anyways. + CHECK_GE(static_cast<size_t>(image_dex_file_names.size()), + static_cast<size_t>(dex_caches->GetLength())); + size_t image_count = image_dex_file_names.size() - dex_caches->GetLength(); + // Check that the dex file names match. + bool equal = image_count == loader_dex_file_names.size(); + if (equal) { + auto it1 = image_dex_file_names.begin(); + auto it2 = loader_dex_file_names.begin(); + for (size_t i = 0; equal && i < image_count; ++i, ++it1, ++it2) { + equal = equal && (*it1)->Equals(*it2); + } + } + if (!equal) { + *error_msg = "Rejecting application image due to class loader mismatch"; + return false; + } + } } if (kSanityCheckObjects) { - for (auto dex_caches : dex_caches_vector) { - for (int32_t i = 0; i < dex_caches->GetLength(); i++) { - auto* dex_cache = dex_caches->Get(i); - for (size_t j = 0; j < dex_cache->NumResolvedFields(); ++j) { - auto* field = dex_cache->GetResolvedField(j, image_pointer_size_); - if (field != nullptr) { - CHECK(field->GetDeclaringClass()->GetClass() != nullptr); - } + for (int32_t i = 0; i < dex_caches->GetLength(); i++) { + auto* dex_cache = dex_caches->Get(i); + for (size_t j = 0; j < dex_cache->NumResolvedFields(); ++j) { + auto* field = dex_cache->GetResolvedField(j, image_pointer_size_); + if (field != nullptr) { + CHECK(field->GetDeclaringClass()->GetClass() != nullptr); } } } - heap->VisitObjects(SanityCheckObjectsCallback, nullptr); + if (!app_image) { + heap->VisitObjects(SanityCheckObjectsCallback, nullptr); + } } // Set entry point to interpreter if in InterpretOnly mode. 
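
To make the class-loader comparison above concrete, here is a minimal standalone sketch of the flatten-and-compare check, using plain std::list/std::string rather than the ART mirror types: the image loader's flattened dex-file names, minus the image's own dex files, must equal the runtime loader's flattened names plus the dex_elements entries.

#include <list>
#include <string>

// Illustrative only: mirrors the check above. image_names is the flattened image
// class loader ([A][B][C][image dex files]); loader_names is the flattened runtime
// loader, to which the dex_elements names are appended before comparing.
bool ClassLoaderDexNamesMatch(const std::list<std::string>& image_names,
                              std::list<std::string> loader_names,
                              const std::list<std::string>& dex_elements,
                              size_t image_dex_file_count) {
  loader_names.insert(loader_names.end(), dex_elements.begin(), dex_elements.end());
  if (image_names.size() < image_dex_file_count) {
    return false;
  }
  // Ignore the image's own dex files; they are added to the loader anyway.
  const size_t prefix_count = image_names.size() - image_dex_file_count;
  if (prefix_count != loader_names.size()) {
    return false;
  }
  auto it1 = image_names.begin();
  auto it2 = loader_names.begin();
  for (size_t i = 0; i < prefix_count; ++i, ++it1, ++it2) {
    if (*it1 != *it2) {
      return false;  // Same position resolves a different dex file: reject the app image.
    }
  }
  return true;
}
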
if (!runtime->IsAotCompiler() && runtime->GetInstrumentation()->InterpretOnly()) { - for (gc::space::ImageSpace* space : spaces) { - const ImageHeader& header = space->GetImageHeader(); - const ImageSection& methods = header.GetMethodsSection(); - SetInterpreterEntrypointArtMethodVisitor visitor(image_pointer_size_); - methods.VisitPackedArtMethods(&visitor, space->Begin(), image_pointer_size_); - } - } - - // reinit class_roots_ - mirror::Class::SetClassClass(class_roots->Get(kJavaLangClass)); - class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>(class_roots.Get()); - - // reinit array_iftable_ from any array class instance, they should be == - array_iftable_ = GcRoot<mirror::IfTable>(GetClassRoot(kObjectArrayClass)->GetIfTable()); - DCHECK_EQ(array_iftable_.Read(), GetClassRoot(kBooleanArrayClass)->GetIfTable()); - // String class root was set above - mirror::Field::SetClass(GetClassRoot(kJavaLangReflectField)); - mirror::Field::SetArrayClass(GetClassRoot(kJavaLangReflectFieldArrayClass)); - mirror::Constructor::SetClass(GetClassRoot(kJavaLangReflectConstructor)); - mirror::Constructor::SetArrayClass(GetClassRoot(kJavaLangReflectConstructorArrayClass)); - mirror::Method::SetClass(GetClassRoot(kJavaLangReflectMethod)); - mirror::Method::SetArrayClass(GetClassRoot(kJavaLangReflectMethodArrayClass)); - mirror::Reference::SetClass(GetClassRoot(kJavaLangRefReference)); - mirror::BooleanArray::SetArrayClass(GetClassRoot(kBooleanArrayClass)); - mirror::ByteArray::SetArrayClass(GetClassRoot(kByteArrayClass)); - mirror::CharArray::SetArrayClass(GetClassRoot(kCharArrayClass)); - mirror::DoubleArray::SetArrayClass(GetClassRoot(kDoubleArrayClass)); - mirror::FloatArray::SetArrayClass(GetClassRoot(kFloatArrayClass)); - mirror::IntArray::SetArrayClass(GetClassRoot(kIntArrayClass)); - mirror::LongArray::SetArrayClass(GetClassRoot(kLongArrayClass)); - mirror::ShortArray::SetArrayClass(GetClassRoot(kShortArrayClass)); - mirror::Throwable::SetClass(GetClassRoot(kJavaLangThrowable)); - mirror::StackTraceElement::SetClass(GetClassRoot(kJavaLangStackTraceElement)); - - size_t class_tables_added = 0; - for (gc::space::ImageSpace* space : spaces) { - const ImageHeader& header = space->GetImageHeader(); - const ImageSection& section = header.GetImageSection(ImageHeader::kSectionClassTable); - if (section.Size() > 0u) { - WriterMutexLock mu(self, *Locks::classlinker_classes_lock_); - ClassTable* const class_table = InsertClassTableForClassLoader(nullptr); - class_table->ReadFromMemory(space->Begin() + section.Offset()); - ++class_tables_added; + const ImageSection& methods = header.GetMethodsSection(); + SetInterpreterEntrypointArtMethodVisitor visitor(image_pointer_size_); + methods.VisitPackedArtMethods(&visitor, space->Begin(), image_pointer_size_); + } + + const ImageSection& class_table_section = header.GetImageSection(ImageHeader::kSectionClassTable); + bool added_class_table = false; + if (app_image) { + GetOrCreateAllocatorForClassLoader(class_loader.Get()); // Make sure we have a linear alloc. 
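
A minimal model of why the class table is re-tagged here: classes deserialized from an app image class table were written without a usable loader reference, so after ReadFromMemory every entry is pointed at the defining app class loader. The types below are simplified stand-ins, not the real mirror classes.

#include <string>
#include <vector>

struct ClassModel {
  std::string descriptor;
  void* class_loader = nullptr;  // Null as serialized into the app image.
};

// Sketch of ClassTable::SetClassLoader above: re-tag every contained class with the
// app class loader; a boot image table (loader == nullptr) needs no rewrite.
struct ClassTableModel {
  std::vector<ClassModel*> classes;

  void SetClassLoader(void* loader) {
    for (ClassModel* klass : classes) {
      klass->class_loader = loader;
    }
  }
};
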
+ } + if (class_table_section.Size() > 0u) { + const uint64_t start_time2 = NanoTime(); + WriterMutexLock mu(self, *Locks::classlinker_classes_lock_); + ClassTable* const class_table = InsertClassTableForClassLoader(class_loader.Get()); + class_table->ReadFromMemory(space->Begin() + class_table_section.Offset()); + if (app_image) { + class_table->SetClassLoader(class_loader.Get()); + } else { + dex_cache_boot_image_class_lookup_required_ = false; } + VLOG(image) << "Adding class table classes took " << PrettyDuration(NanoTime() - start_time2); + added_class_table = true; } - if (class_tables_added != 0) { - // Either all of the image spaces have an empty class section or none do. In the case where - // an image space has no classes, it will still have a non-empty class section that contains - // metadata. - CHECK_EQ(spaces.size(), class_tables_added) - << "Expected non-empty class section for each image space."; - dex_cache_boot_image_class_lookup_required_ = false; + if (app_image) { + UpdateAppImageClassLoadersAndDexCaches(space, class_loader, dex_caches, added_class_table); } - - FinishInit(self); - - VLOG(startup) << "ClassLinker::InitFromImage exiting"; - + VLOG(class_linker) << "Adding image space took " << PrettyDuration(NanoTime() - start_time); return true; } @@ -1527,14 +1971,6 @@ ClassPathEntry FindInClassPath(const char* descriptor, return ClassPathEntry(nullptr, nullptr); } -static bool IsBootClassLoader(ScopedObjectAccessAlreadyRunnable& soa, - mirror::ClassLoader* class_loader) - SHARED_REQUIRES(Locks::mutator_lock_) { - return class_loader == nullptr || - class_loader->GetClass() == - soa.Decode<mirror::Class*>(WellKnownClasses::java_lang_BootClassLoader); -} - bool ClassLinker::FindClassInPathClassLoader(ScopedObjectAccessAlreadyRunnable& soa, Thread* self, const char* descriptor, @@ -1820,6 +2256,7 @@ mirror::Class* ClassLinker::DefineClass(Thread* self, // inserted before we allocate / fill in these fields. LoadClass(self, dex_file, dex_class_def, klass); if (self->IsExceptionPending()) { + VLOG(class_linker) << self->GetException()->Dump(); // An exception occured during load, set status to erroneous while holding klass' lock in case // notification is necessary. if (!klass->IsErroneous()) { @@ -2487,7 +2924,20 @@ void ClassLinker::RegisterDexFileLocked(const DexFile& dex_file, Thread* const self = Thread::Current(); dex_lock_.AssertExclusiveHeld(self); CHECK(dex_cache.Get() != nullptr) << dex_file.GetLocation(); - CHECK(dex_cache->GetLocation()->Equals(dex_file.GetLocation())) + // For app images, the dex cache location may be a suffix of the dex file location since the + // dex file location is an absolute path. + const size_t dex_cache_length = dex_cache->GetLocation()->GetLength(); + CHECK_GT(dex_cache_length, 0u) << dex_file.GetLocation(); + std::string dex_file_location = dex_file.GetLocation(); + CHECK_GE(dex_file_location.length(), dex_cache_length) + << dex_cache->GetLocation()->ToModifiedUtf8() << " " << dex_file.GetLocation(); + // Take suffix. + const std::string dex_file_suffix = dex_file_location.substr( + dex_file_location.length() - dex_cache_length, + dex_cache_length); + // Example dex_cache location is SettingsProvider.apk and + // dex file location is /system/priv-app/SettingsProvider/SettingsProvider.apk + CHECK(dex_cache->GetLocation()->Equals(dex_file_suffix)) << dex_cache->GetLocation()->ToModifiedUtf8() << " " << dex_file.GetLocation(); // Clean up pass to remove null dex caches. 
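
The suffix check added to RegisterDexFileLocked above can be sketched with plain std::string: for app images the dex cache stores only the trailing component of the absolute dex file location (e.g. SettingsProvider.apk versus /system/priv-app/SettingsProvider/SettingsProvider.apk).

#include <string>

// Illustrative check, not the real mirror::String comparison: the dex cache location
// must be a non-empty suffix of the dex file's absolute location.
bool DexCacheLocationMatches(const std::string& dex_cache_location,
                             const std::string& dex_file_location) {
  const size_t cache_len = dex_cache_location.length();
  if (cache_len == 0u || dex_file_location.length() < cache_len) {
    return false;
  }
  return dex_file_location.compare(dex_file_location.length() - cache_len,
                                   cache_len,
                                   dex_cache_location) == 0;
}
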
// Null dex caches can occur due to class unloading and we are lazily removing null entries. @@ -6931,10 +7381,13 @@ jobject ClassLinker::CreatePathClassLoader(Thread* self, std::vector<const DexFi ArtField* cookie_field = soa.DecodeField(WellKnownClasses::dalvik_system_DexFile_cookie); DCHECK_EQ(cookie_field->GetDeclaringClass(), element_file_field->GetType<false>()); + ArtField* file_name_field = soa.DecodeField(WellKnownClasses::dalvik_system_DexFile_fileName); + DCHECK_EQ(file_name_field->GetDeclaringClass(), element_file_field->GetType<false>()); + // Fill the elements array. int32_t index = 0; for (const DexFile* dex_file : dex_files) { - StackHandleScope<3> hs2(self); + StackHandleScope<4> hs2(self); // CreatePathClassLoader is only used by gtests. Index 0 of h_long_array is supposed to be the // oat file but we can leave it null. @@ -6949,6 +7402,11 @@ jobject ClassLinker::CreatePathClassLoader(Thread* self, std::vector<const DexFi DCHECK(h_dex_file.Get() != nullptr); cookie_field->SetObject<false>(h_dex_file.Get(), h_long_array.Get()); + Handle<mirror::String> h_file_name = hs2.NewHandle( + mirror::String::AllocFromModifiedUtf8(self, dex_file->GetLocation().c_str())); + DCHECK(h_file_name.Get() != nullptr); + file_name_field->SetObject<false>(h_dex_file.Get(), h_file_name.Get()); + Handle<mirror::Object> h_element = hs2.NewHandle(h_dex_element_class->AllocObject(self)); DCHECK(h_element.Get() != nullptr); element_file_field->SetObject<false>(h_element.Get(), h_dex_file.Get()); @@ -7048,6 +7506,7 @@ void ClassLinker::CleanupClassLoaders() { if (class_loader != nullptr) { ++it; } else { + VLOG(class_linker) << "Freeing class loader"; DeleteClassLoader(self, data); it = class_loaders_.erase(it); } diff --git a/runtime/class_linker.h b/runtime/class_linker.h index f1fd0c38f1..d503dd4704 100644 --- a/runtime/class_linker.h +++ b/runtime/class_linker.h @@ -120,11 +120,25 @@ class ClassLinker { SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!dex_lock_); - // Initialize class linker from one or more images. - bool InitFromImage(std::string* error_msg) + // Initialize class linker from one or more boot images. + bool InitFromBootImage(std::string* error_msg) SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!dex_lock_); + // Add an image space to the class linker, may fix up classloader fields and dex cache fields. + // The dex files that were newly opened for the space are placed in the out argument + // out_dex_files. Returns true if the operation succeeded. + // The space must be already added to the heap before calling AddImageSpace since we need to + // properly handle read barriers and object marking. + bool AddImageSpace(gc::space::ImageSpace* space, + Handle<mirror::ClassLoader> class_loader, + jobjectArray dex_elements, + const char* dex_location, + std::vector<std::unique_ptr<const DexFile>>* out_dex_files, + std::string* error_msg) + REQUIRES(!dex_lock_) + SHARED_REQUIRES(Locks::mutator_lock_); + // Finds a class by its descriptor, loading it if necessary. // If class_loader is null, searches boot_class_path_. 
mirror::Class* FindClass(Thread* self, @@ -985,8 +999,16 @@ class ClassLinker { SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!Locks::classlinker_classes_lock_); + void UpdateAppImageClassLoadersAndDexCaches( + gc::space::ImageSpace* space, + Handle<mirror::ClassLoader> class_loader, + Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches, + bool added_class_table) + REQUIRES(!dex_lock_) + SHARED_REQUIRES(Locks::mutator_lock_); + std::vector<const DexFile*> boot_class_path_; - std::vector<std::unique_ptr<const DexFile>> opened_dex_files_; + std::vector<std::unique_ptr<const DexFile>> boot_dex_files_; mutable ReaderWriterMutex dex_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER; // JNI weak globals and side data to allow dex caches to get unloaded. We lazily delete weak diff --git a/runtime/class_table.cc b/runtime/class_table.cc index df2dbf416c..2a4f0e01af 100644 --- a/runtime/class_table.cc +++ b/runtime/class_table.cc @@ -40,6 +40,16 @@ bool ClassTable::Contains(mirror::Class* klass) { return false; } +mirror::Class* ClassTable::LookupByDescriptor(mirror::Class* klass) { + for (ClassSet& class_set : classes_) { + auto it = class_set.Find(GcRoot<mirror::Class>(klass)); + if (it != class_set.end()) { + return it->Read(); + } + } + return nullptr; +} + mirror::Class* ClassTable::UpdateClass(const char* descriptor, mirror::Class* klass, size_t hash) { // Should only be updating latest table. auto existing_it = classes_.back().FindWithHash(descriptor, hash); @@ -173,4 +183,12 @@ size_t ClassTable::ReadFromMemory(uint8_t* ptr) { return read_count; } +void ClassTable::SetClassLoader(mirror::ClassLoader* class_loader) { + for (const ClassSet& class_set : classes_) { + for (const GcRoot<mirror::Class>& root : class_set) { + root.Read()->SetClassLoader(class_loader); + } + } +} + } // namespace art diff --git a/runtime/class_table.h b/runtime/class_table.h index 911f3c22db..0b420352c3 100644 --- a/runtime/class_table.h +++ b/runtime/class_table.h @@ -84,9 +84,14 @@ class ClassTable { bool Visit(ClassVisitor* visitor) SHARED_REQUIRES(Locks::classlinker_classes_lock_, Locks::mutator_lock_); + // Return the first class that matches the descriptor. Returns null if there are none. mirror::Class* Lookup(const char* descriptor, size_t hash) SHARED_REQUIRES(Locks::classlinker_classes_lock_, Locks::mutator_lock_); + // Return the first class that matches the descriptor of klass. Returns null if there are none. + mirror::Class* LookupByDescriptor(mirror::Class* klass) + SHARED_REQUIRES(Locks::classlinker_classes_lock_, Locks::mutator_lock_); + void Insert(mirror::Class* klass) REQUIRES(Locks::classlinker_classes_lock_) SHARED_REQUIRES(Locks::mutator_lock_); @@ -107,10 +112,17 @@ class ClassTable { // Combines all of the tables into one class set. size_t WriteToMemory(uint8_t* ptr) const SHARED_REQUIRES(Locks::classlinker_classes_lock_, Locks::mutator_lock_); + + // Read a table from ptr and put it at the front of the class set. size_t ReadFromMemory(uint8_t* ptr) REQUIRES(Locks::classlinker_classes_lock_) SHARED_REQUIRES(Locks::mutator_lock_); + // Change the class loader of all the contained classes. 
+ void SetClassLoader(mirror::ClassLoader* class_loader) + REQUIRES(Locks::classlinker_classes_lock_) + SHARED_REQUIRES(Locks::mutator_lock_); + private: class ClassDescriptorHashEquals { public: diff --git a/runtime/gc/accounting/card_table.h b/runtime/gc/accounting/card_table.h index 88a6c6c6e4..b6af90806b 100644 --- a/runtime/gc/accounting/card_table.h +++ b/runtime/gc/accounting/card_table.h @@ -115,6 +115,8 @@ class CardTable { // Resets all of the bytes in the card table to clean. void ClearCardTable(); + + // Clear a range of cards that covers start to end, start and end must be aligned to kCardSize. void ClearCardRange(uint8_t* start, uint8_t* end); // Resets all of the bytes in the card table which do not map to the image space. diff --git a/runtime/gc/accounting/space_bitmap-inl.h b/runtime/gc/accounting/space_bitmap-inl.h index 61c67f86c4..4cf5b4f643 100644 --- a/runtime/gc/accounting/space_bitmap-inl.h +++ b/runtime/gc/accounting/space_bitmap-inl.h @@ -167,8 +167,12 @@ inline bool SpaceBitmap<kAlignment>::Modify(const mirror::Object* obj) { uintptr_t* address = &bitmap_begin_[index]; uintptr_t old_word = *address; if (kSetBit) { + // Check the bit before setting the word incase we are trying to mark a read only bitmap + // like an image space bitmap. This bitmap is mapped as read only and will fault if we + // attempt to change any words. Since all of the objects are marked, this will never + // occur if we check before setting the bit. This also prevents dirty pages that would + // occur if the bitmap was read write and we did not check the bit. if ((old_word & mask) == 0) { - // Avoid dirtying the page if possible. *address = old_word | mask; } } else { diff --git a/runtime/gc/collector/immune_spaces_test.cc b/runtime/gc/collector/immune_spaces_test.cc index 4884e668c2..ea290dd07d 100644 --- a/runtime/gc/collector/immune_spaces_test.cc +++ b/runtime/gc/collector/immune_spaces_test.cc @@ -112,8 +112,13 @@ class DummyImageSpace : public space::ImageSpace { /*oat_data_begin*/PointerToLowMemUInt32(map->End()), /*oat_data_end*/PointerToLowMemUInt32(map->End() + oat_size), /*oat_file_end*/PointerToLowMemUInt32(map->End() + oat_size), + /*boot_image_begin*/0u, + /*boot_image_size*/0u, + /*boot_oat_begin*/0u, + /*boot_oat_size*/0u, /*pointer_size*/sizeof(void*), /*compile_pic*/false, + /*is_pic*/false, ImageHeader::kStorageModeUncompressed, /*storage_size*/0u); return new DummyImageSpace(map.release(), live_bitmap.release()); diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc index 2fb5e349b5..8cd8d73ba5 100644 --- a/runtime/gc/heap.cc +++ b/runtime/gc/heap.cc @@ -273,10 +273,11 @@ Heap::Heap(size_t initial_size, std::string& image_name = image_file_names[index]; ATRACE_BEGIN("ImageSpace::Create"); std::string error_msg; - space::ImageSpace* boot_image_space = space::ImageSpace::Create(image_name.c_str(), - image_instruction_set, - index > 0, - &error_msg); + space::ImageSpace* boot_image_space = space::ImageSpace::CreateBootImage( + image_name.c_str(), + image_instruction_set, + index > 0, + &error_msg); ATRACE_END(); if (boot_image_space != nullptr) { AddSpace(boot_image_space); @@ -491,7 +492,15 @@ Heap::Heap(size_t initial_size, ATRACE_END(); // Allocate the card table. ATRACE_BEGIN("Create card table"); - card_table_.reset(accounting::CardTable::Create(heap_begin, heap_capacity)); + // We currently don't support dynamically resizing the card table. + // Since we don't know where in the low_4gb the app image will be located, make the card table + // cover the whole low_4gb. 
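
The check-before-set pattern added to SpaceBitmap::Modify above boils down to the following: it keeps a fully marked, read-only image bitmap from ever being written to (which would fault), and it keeps writable bitmaps from dirtying pages when the bit is already set.

#include <cstdint>

// Sketch of the fast path above: only write the word back when the bit is not
// already set.
inline void SetBitIfClear(uintptr_t* word, uintptr_t mask) {
  const uintptr_t old_word = *word;
  if ((old_word & mask) == 0) {
    *word = old_word | mask;  // Only dirty (or fault on) the page when necessary.
  }
}
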
TODO: Extend the card table in AddSpace. + UNUSED(heap_capacity); + // Start at 64 KB, we can be sure there are no spaces mapped this low since the address range is + // reserved by the kernel. + static constexpr size_t kMinHeapAddress = 4 * KB; + card_table_.reset(accounting::CardTable::Create(reinterpret_cast<uint8_t*>(kMinHeapAddress), + 4 * GB - kMinHeapAddress)); CHECK(card_table_.get() != nullptr) << "Failed to create card table"; ATRACE_END(); if (foreground_collector_type_ == kCollectorTypeCC && kUseTableLookupReadBarrier) { @@ -1252,10 +1261,6 @@ space::Space* Heap::FindSpaceFromObject(const mirror::Object* obj, bool fail_ok) return FindDiscontinuousSpaceFromObject(obj, fail_ok); } -std::vector<space::ImageSpace*> Heap::GetBootImageSpaces() const { - return boot_image_spaces_; -} - void Heap::ThrowOutOfMemoryError(Thread* self, size_t byte_count, AllocatorType allocator_type) { std::ostringstream oss; size_t total_bytes_free = GetFreeMemory(); @@ -3194,7 +3199,13 @@ void Heap::ProcessCards(TimingLogger* timings, } else if (process_alloc_space_cards) { TimingLogger::ScopedTiming t2("AllocSpaceClearCards", timings); if (clear_alloc_space_cards) { - card_table_->ClearCardRange(space->Begin(), space->End()); + uint8_t* end = space->End(); + if (space->IsImageSpace()) { + // Image space end is the end of the mirror objects, it is not necessarily page or card + // aligned. Align up so that the check in ClearCardRange does not fail. + end = AlignUp(end, accounting::CardTable::kCardSize); + } + card_table_->ClearCardRange(space->Begin(), end); } else { // No mod union table for the AllocSpace. Age the cards so that the GC knows that these // cards were dirty before the GC started. @@ -3989,5 +4000,43 @@ void Heap::DisableGCForShutdown() { gc_disabled_for_shutdown_ = true; } +bool Heap::ObjectIsInBootImageSpace(mirror::Object* obj) const { + for (gc::space::ImageSpace* space : boot_image_spaces_) { + if (space->HasAddress(obj)) { + return true; + } + } + return false; +} + +void Heap::GetBootImagesSize(uint32_t* boot_image_begin, + uint32_t* boot_image_end, + uint32_t* boot_oat_begin, + uint32_t* boot_oat_end) { + DCHECK(boot_image_begin != nullptr); + DCHECK(boot_image_end != nullptr); + DCHECK(boot_oat_begin != nullptr); + DCHECK(boot_oat_end != nullptr); + *boot_image_begin = 0u; + *boot_image_end = 0u; + *boot_oat_begin = 0u; + *boot_oat_end = 0u; + for (gc::space::ImageSpace* space_ : GetBootImageSpaces()) { + const uint32_t image_begin = PointerToLowMemUInt32(space_->Begin()); + const uint32_t image_size = space_->GetImageHeader().GetImageSize(); + if (*boot_image_begin == 0 || image_begin < *boot_image_begin) { + *boot_image_begin = image_begin; + } + *boot_image_end = std::max(*boot_image_end, image_begin + image_size); + const OatFile* boot_oat_file = space_->GetOatFile(); + const uint32_t oat_begin = PointerToLowMemUInt32(boot_oat_file->Begin()); + const uint32_t oat_size = boot_oat_file->Size(); + if (*boot_oat_begin == 0 || oat_begin < *boot_oat_begin) { + *boot_oat_begin = oat_begin; + } + *boot_oat_end = std::max(*boot_oat_end, oat_begin + oat_size); + } +} + } // namespace gc } // namespace art diff --git a/runtime/gc/heap.h b/runtime/gc/heap.h index 7b531ba322..1b7e2c9f0d 100644 --- a/runtime/gc/heap.h +++ b/runtime/gc/heap.h @@ -580,7 +580,17 @@ class Heap { void UnBindBitmaps() REQUIRES(Locks::heap_bitmap_lock_); // Returns the boot image spaces. There may be multiple boot image spaces. 
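
GetBootImagesSize above is essentially a union over per-space ranges, applied once to the image spaces and once to their oat files. A simplified standalone version, using a generic Range struct instead of ImageSpace/OatFile, looks like this.

#include <algorithm>
#include <cstdint>
#include <vector>

struct Range {
  uint32_t begin;
  uint32_t size;
};

// Sketch of the union computation above: the boot image may consist of several
// spaces, so the app image only records the enclosing [begin, end) of all of them.
// A begin of zero is used as the "not yet set" sentinel, as in the code above.
void UnionRange(const std::vector<Range>& ranges, uint32_t* out_begin, uint32_t* out_end) {
  *out_begin = 0u;
  *out_end = 0u;
  for (const Range& r : ranges) {
    if (*out_begin == 0u || r.begin < *out_begin) {
      *out_begin = r.begin;
    }
    *out_end = std::max(*out_end, r.begin + r.size);
  }
}
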
- std::vector<space::ImageSpace*> GetBootImageSpaces() const; + const std::vector<space::ImageSpace*>& GetBootImageSpaces() const { + return boot_image_spaces_; + } + + bool ObjectIsInBootImageSpace(mirror::Object* obj) const + SHARED_REQUIRES(Locks::mutator_lock_); + + void GetBootImagesSize(uint32_t* boot_image_begin, + uint32_t* boot_image_end, + uint32_t* boot_oat_begin, + uint32_t* boot_oat_end); // Permenantly disable moving garbage collection. void DisableMovingGc() REQUIRES(!*gc_complete_lock_); diff --git a/runtime/gc/space/image_space.cc b/runtime/gc/space/image_space.cc index 5f6bb8ee4b..9ff3d8db75 100644 --- a/runtime/gc/space/image_space.cc +++ b/runtime/gc/space/image_space.cc @@ -475,10 +475,10 @@ static bool CheckSpace(const std::string& cache_filename, std::string* error_msg return true; } -ImageSpace* ImageSpace::Create(const char* image_location, - const InstructionSet image_isa, - bool secondary_image, - std::string* error_msg) { +ImageSpace* ImageSpace::CreateBootImage(const char* image_location, + const InstructionSet image_isa, + bool secondary_image, + std::string* error_msg) { std::string system_filename; bool has_system = false; std::string cache_filename; @@ -584,8 +584,13 @@ ImageSpace* ImageSpace::Create(const char* image_location, // assume this if we are using a relocated image (i.e. image checksum // matches) since this is only different by the offset. We need this to // make sure that host tests continue to work. - space = ImageSpace::Init(image_filename->c_str(), image_location, - !(is_system || relocated_version_used), error_msg); + // Since we are the boot image, pass null since we load the oat file from the boot image oat + // file name. + space = ImageSpace::Init(image_filename->c_str(), + image_location, + !(is_system || relocated_version_used), + /* oat_file */nullptr, + error_msg); } if (space != nullptr) { return space; @@ -646,7 +651,7 @@ ImageSpace* ImageSpace::Create(const char* image_location, // we leave Create. ScopedFlock image_lock; image_lock.Init(cache_filename.c_str(), error_msg); - space = ImageSpace::Init(cache_filename.c_str(), image_location, true, error_msg); + space = ImageSpace::Init(cache_filename.c_str(), image_location, true, nullptr, error_msg); if (space == nullptr) { *error_msg = StringPrintf("Failed to load generated image '%s': %s", cache_filename.c_str(), error_msg->c_str()); @@ -669,34 +674,494 @@ void ImageSpace::VerifyImageAllocations() { } } -ImageSpace* ImageSpace::Init(const char* image_filename, const char* image_location, - bool validate_oat_file, std::string* error_msg) { +// Helper class for relocating from one range of memory to another. +class RelocationRange { + public: + RelocationRange() = default; + RelocationRange(const RelocationRange&) = default; + RelocationRange(uintptr_t source, uintptr_t dest, uintptr_t length) + : source_(source), + dest_(dest), + length_(length) {} + + bool ContainsSource(uintptr_t address) const { + return address - source_ < length_; + } + + // Translate a source address to the destination space. + uintptr_t ToDest(uintptr_t address) const { + DCHECK(ContainsSource(address)); + return address + Delta(); + } + + // Returns the delta between the dest from the source. 
+ off_t Delta() const { + return dest_ - source_; + } + + uintptr_t Source() const { + return source_; + } + + uintptr_t Dest() const { + return dest_; + } + + uintptr_t Length() const { + return length_; + } + + private: + const uintptr_t source_; + const uintptr_t dest_; + const uintptr_t length_; +}; + +class FixupVisitor : public ValueObject { + public: + FixupVisitor(const RelocationRange& boot_image, + const RelocationRange& boot_oat, + const RelocationRange& app_image, + const RelocationRange& app_oat) + : boot_image_(boot_image), + boot_oat_(boot_oat), + app_image_(app_image), + app_oat_(app_oat) {} + + // Return the relocated address of a heap object. + template <typename T> + ALWAYS_INLINE T* ForwardObject(T* src) const { + const uintptr_t uint_src = reinterpret_cast<uintptr_t>(src); + if (boot_image_.ContainsSource(uint_src)) { + return reinterpret_cast<T*>(boot_image_.ToDest(uint_src)); + } + if (app_image_.ContainsSource(uint_src)) { + return reinterpret_cast<T*>(app_image_.ToDest(uint_src)); + } + return src; + } + + // Return the relocated address of a code pointer (contained by an oat file). + ALWAYS_INLINE const void* ForwardCode(const void* src) const { + const uintptr_t uint_src = reinterpret_cast<uintptr_t>(src); + if (boot_oat_.ContainsSource(uint_src)) { + return reinterpret_cast<const void*>(boot_oat_.ToDest(uint_src)); + } + if (app_oat_.ContainsSource(uint_src)) { + return reinterpret_cast<const void*>(app_oat_.ToDest(uint_src)); + } + return src; + } + + protected: + // Source section. + const RelocationRange boot_image_; + const RelocationRange boot_oat_; + const RelocationRange app_image_; + const RelocationRange app_oat_; +}; + +std::ostream& operator<<(std::ostream& os, const RelocationRange& reloc) { + return os << "(" << reinterpret_cast<const void*>(reloc.Source()) << "-" + << reinterpret_cast<const void*>(reloc.Source() + reloc.Length()) << ")->(" + << reinterpret_cast<const void*>(reloc.Dest()) << "-" + << reinterpret_cast<const void*>(reloc.Dest() + reloc.Length()) << ")"; +} + +// Adapt for mirror::Class::FixupNativePointers. +class FixupObjectAdapter : public FixupVisitor { + public: + template<typename... Args> + explicit FixupObjectAdapter(Args... args) : FixupVisitor(args...) {} + + template <typename T> + T* operator()(T* obj) const { + return ForwardObject(obj); + } +}; + +class FixupClassVisitor : public FixupVisitor { + public: + template<typename... Args> + explicit FixupClassVisitor(Args... args) : FixupVisitor(args...) {} + + // The image space is contained so the GC doesn't need to know about it. Avoid requiring mutator + // lock to prevent possible pauses. + ALWAYS_INLINE void operator()(mirror::Object* obj) const NO_THREAD_SAFETY_ANALYSIS { + mirror::Class* klass = obj->GetClass<kVerifyNone, kWithoutReadBarrier>(); + DCHECK(klass != nullptr) << "Null class in image"; + // No AsClass since our fields aren't quite fixed up yet. + mirror::Class* new_klass = down_cast<mirror::Class*>(ForwardObject(klass)); + // Keep clean if possible. + if (klass != new_klass) { + obj->SetClass<kVerifyNone>(new_klass); + } + } +}; + +class FixupRootVisitor : public FixupVisitor { + public: + template<typename... Args> + explicit FixupRootVisitor(Args... args) : FixupVisitor(args...) 
{} + + ALWAYS_INLINE void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const + SHARED_REQUIRES(Locks::mutator_lock_) { + if (!root->IsNull()) { + VisitRoot(root); + } + } + + ALWAYS_INLINE void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const + SHARED_REQUIRES(Locks::mutator_lock_) { + mirror::Object* ref = root->AsMirrorPtr(); + mirror::Object* new_ref = ForwardObject(ref); + if (ref != new_ref) { + root->Assign(new_ref); + } + } +}; + +class FixupObjectVisitor : public FixupVisitor { + public: + template<typename... Args> + explicit FixupObjectVisitor(Args... args) : FixupVisitor(args...) {} + + // Fix up separately since we also need to fix up method entrypoints. + ALWAYS_INLINE void VisitRootIfNonNull( + mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {} + + ALWAYS_INLINE void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) + const {} + + ALWAYS_INLINE void operator()(mirror::Object* obj, + MemberOffset offset, + bool is_static ATTRIBUTE_UNUSED) const + NO_THREAD_SAFETY_ANALYSIS { + // There could be overlap between ranges, we must avoid visiting the same reference twice. + // Avoid the class field since we already fixed it up in FixupClassVisitor. + if (offset.Uint32Value() != mirror::Object::ClassOffset().Uint32Value()) { + // Space is not yet added to the heap, don't do a read barrier. + mirror::Object* ref = obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>( + offset); + // Use SetFieldObjectWithoutWriteBarrier to avoid card marking since we are writing to the + // image. + obj->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>(offset, ForwardObject(ref)); + } + } + + // java.lang.ref.Reference visitor. + void operator()(mirror::Class* klass ATTRIBUTE_UNUSED, mirror::Reference* ref) const + SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) { + mirror::Object* obj = ref->GetReferent<kWithoutReadBarrier>(); + ref->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>( + mirror::Reference::ReferentOffset(), + ForwardObject(obj)); + } + + ALWAYS_INLINE void operator()(mirror::Object* obj) const NO_THREAD_SAFETY_ANALYSIS { + obj->VisitReferences</*visit native roots*/false, kVerifyNone, kWithoutReadBarrier>( + *this, + *this); + // We want to use our own class loader and not the one in the image. + if (obj->IsClass<kVerifyNone, kWithoutReadBarrier>()) { + mirror::Class* klass = obj->AsClass<kVerifyNone, kWithoutReadBarrier>(); + FixupObjectAdapter visitor(boot_image_, boot_oat_, app_image_, app_oat_); + klass->FixupNativePointers(klass, sizeof(void*), visitor); + // Deal with the arrays. 
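
The RelocationRange/FixupVisitor machinery above reduces to one forwarding rule: a pointer that falls inside a known source range is shifted by that range's delta, and anything else is returned unchanged because it already points into correctly placed memory. A condensed, self-contained sketch with two ranges instead of four and simplified types:

#include <cstdint>

struct Reloc {
  uintptr_t source;
  uintptr_t dest;
  uintptr_t length;

  // Unsigned wrap-around makes this a single-compare range check, as in
  // RelocationRange::ContainsSource above.
  bool Contains(uintptr_t address) const { return address - source < length; }
  uintptr_t ToDest(uintptr_t address) const { return address + (dest - source); }
};

// Illustrative version of ForwardObject/ForwardCode: check each candidate range in
// turn and translate on the first hit.
template <typename T>
T* Forward(T* src, const Reloc& image, const Reloc& oat) {
  const uintptr_t address = reinterpret_cast<uintptr_t>(src);
  if (image.Contains(address)) {
    return reinterpret_cast<T*>(image.ToDest(address));
  }
  if (oat.Contains(address)) {
    return reinterpret_cast<T*>(oat.ToDest(address));
  }
  return src;
}
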
+ mirror::PointerArray* vtable = klass->GetVTable<kVerifyNone, kWithoutReadBarrier>(); + if (vtable != nullptr) { + vtable->Fixup(vtable, sizeof(void*), visitor); + } + mirror::IfTable* iftable = klass->GetIfTable<kVerifyNone, kWithoutReadBarrier>(); + if (iftable != nullptr) { + for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) { + if (iftable->GetMethodArrayCount(i) > 0) { + mirror::PointerArray* methods = + iftable->GetMethodArray<kVerifyNone, kWithoutReadBarrier>(i); + DCHECK(methods != nullptr); + methods->Fixup(methods, sizeof(void*), visitor); + } + } + } + } + } +}; + +class ForwardObjectAdapter { + public: + ALWAYS_INLINE ForwardObjectAdapter(const FixupVisitor* visitor) : visitor_(visitor) {} + + template <typename T> + ALWAYS_INLINE T* operator()(T* src) const { + return visitor_->ForwardObject(src); + } + + private: + const FixupVisitor* const visitor_; +}; + +class ForwardCodeAdapter { + public: + ALWAYS_INLINE ForwardCodeAdapter(const FixupVisitor* visitor) : visitor_(visitor) {} + + template <typename T> + ALWAYS_INLINE T* operator()(T* src) const { + return visitor_->ForwardCode(src); + } + + private: + const FixupVisitor* const visitor_; +}; + +class FixupArtMethodVisitor : public FixupVisitor, public ArtMethodVisitor { + public: + template<typename... Args> + explicit FixupArtMethodVisitor(bool fixup_heap_objects, Args... args) + : FixupVisitor(args...), + fixup_heap_objects_(fixup_heap_objects) {} + + virtual void Visit(ArtMethod* method) NO_THREAD_SAFETY_ANALYSIS { + if (fixup_heap_objects_) { + method->UpdateObjectsForImageRelocation(ForwardObjectAdapter(this)); + } + method->UpdateEntrypoints(ForwardCodeAdapter(this)); + } + + private: + const bool fixup_heap_objects_; +}; + +class FixupArtFieldVisitor : public FixupVisitor, public ArtFieldVisitor { + public: + template<typename... Args> + explicit FixupArtFieldVisitor(Args... args) : FixupVisitor(args...) {} + + virtual void Visit(ArtField* field) NO_THREAD_SAFETY_ANALYSIS { + field->UpdateObjects(ForwardObjectAdapter(this)); + } +}; + +// Relocate an image space mapped at target_base which possibly used to be at a different base +// address. Only needs a single image space, not one for both source and destination. +// In place means modifying a single ImageSpace in place rather than relocating from one ImageSpace +// to another. +static bool RelocateInPlace(ImageHeader& image_header, + uint8_t* target_base, + accounting::ContinuousSpaceBitmap* bitmap, + const OatFile* app_oat_file, + std::string* error_msg) { + DCHECK(error_msg != nullptr); + if (!image_header.IsPic()) { + if (image_header.GetImageBegin() == target_base) { + return true; + } + *error_msg = StringPrintf("Cannot relocate non-pic image for oat file %s", + (app_oat_file != nullptr) ? app_oat_file->GetLocation().c_str() : ""); + return false; + } + // Set up sections. 
+ uint32_t boot_image_begin = 0; + uint32_t boot_image_end = 0; + uint32_t boot_oat_begin = 0; + uint32_t boot_oat_end = 0; + gc::Heap* const heap = Runtime::Current()->GetHeap(); + heap->GetBootImagesSize(&boot_image_begin, &boot_image_end, &boot_oat_begin, &boot_oat_end); + CHECK_NE(boot_image_begin, boot_image_end) + << "Can not relocate app image without boot image space"; + CHECK_NE(boot_oat_begin, boot_oat_end) << "Can not relocate app image without boot oat file"; + const uint32_t boot_image_size = boot_image_end - boot_image_begin; + const uint32_t boot_oat_size = boot_oat_end - boot_oat_begin; + const uint32_t image_header_boot_image_size = image_header.GetBootImageSize(); + const uint32_t image_header_boot_oat_size = image_header.GetBootOatSize(); + if (boot_image_size != image_header_boot_image_size) { + *error_msg = StringPrintf("Boot image size %" PRIu64 " does not match expected size %" + PRIu64, + static_cast<uint64_t>(boot_image_size), + static_cast<uint64_t>(image_header_boot_image_size)); + return false; + } + if (boot_oat_size != image_header_boot_oat_size) { + *error_msg = StringPrintf("Boot oat size %" PRIu64 " does not match expected size %" + PRIu64, + static_cast<uint64_t>(boot_oat_size), + static_cast<uint64_t>(image_header_boot_oat_size)); + return false; + } + TimingLogger logger(__FUNCTION__, true, false); + RelocationRange boot_image(image_header.GetBootImageBegin(), + boot_image_begin, + boot_image_size); + RelocationRange boot_oat(image_header.GetBootOatBegin(), + boot_oat_begin, + boot_oat_size); + RelocationRange app_image(reinterpret_cast<uintptr_t>(image_header.GetImageBegin()), + reinterpret_cast<uintptr_t>(target_base), + image_header.GetImageSize()); + // Use the oat data section since this is where the OatFile::Begin is. + RelocationRange app_oat(reinterpret_cast<uintptr_t>(image_header.GetOatDataBegin()), + // Not necessarily in low 4GB. + reinterpret_cast<uintptr_t>(app_oat_file->Begin()), + image_header.GetOatDataEnd() - image_header.GetOatDataBegin()); + VLOG(image) << "App image " << app_image; + VLOG(image) << "App oat " << app_oat; + VLOG(image) << "Boot image " << boot_image; + VLOG(image) << "Boot oat " << boot_oat; + // True if we need to fixup any heap pointers, otherwise only code pointers. + const bool fixup_image = boot_image.Delta() != 0 || app_image.Delta() != 0; + const bool fixup_code = boot_oat.Delta() != 0 || app_oat.Delta() != 0; + if (!fixup_image && !fixup_code) { + // Nothing to fix up. + return true; + } + // Need to update the image to be at the target base. + const ImageSection& objects_section = image_header.GetImageSection(ImageHeader::kSectionObjects); + uintptr_t objects_begin = reinterpret_cast<uintptr_t>(target_base + objects_section.Offset()); + uintptr_t objects_end = reinterpret_cast<uintptr_t>(target_base + objects_section.End()); + // Two pass approach, fix up all classes first, then fix up non class-objects. + FixupObjectVisitor fixup_object_visitor(boot_image, boot_oat, app_image, app_oat); + if (fixup_image) { + TimingLogger::ScopedTiming timing("Fixup classes", &logger); + // Fixup class only touches app image classes, don't need the mutator lock since the space is + // not yet visible to the GC. + FixupClassVisitor fixup_class_visitor(boot_image, boot_oat, app_image, app_oat); + bitmap->VisitMarkedRange(objects_begin, objects_end, fixup_class_visitor); + // Fixup objects may read fields in the boot image, use the mutator lock here for sanity. Though + // its probably not required. 
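
The fixup_image/fixup_code decision above amounts to: skip all relocation work when every delta is zero. A hedged sketch using plain integer deltas instead of the RelocationRange objects:

#include <cstdint>

// Illustrative only: heap-pointer fixup is needed when either image range moved,
// code-pointer fixup when either oat range moved; if neither moved the image can be
// used exactly as mapped.
bool NeedsAnyFixup(int64_t boot_image_delta, int64_t app_image_delta,
                   int64_t boot_oat_delta, int64_t app_oat_delta,
                   bool* fixup_image, bool* fixup_code) {
  *fixup_image = boot_image_delta != 0 || app_image_delta != 0;
  *fixup_code = boot_oat_delta != 0 || app_oat_delta != 0;
  return *fixup_image || *fixup_code;
}
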
+ ScopedObjectAccess soa(Thread::Current()); + timing.NewTiming("Fixup objects"); + bitmap->VisitMarkedRange(objects_begin, objects_end, fixup_object_visitor); + FixupObjectAdapter fixup_adapter(boot_image, boot_oat, app_image, app_oat); + // Fixup image roots. + CHECK(app_image.ContainsSource(reinterpret_cast<uintptr_t>(image_header.GetImageRoots()))); + image_header.RelocateImageObjects(app_image.Delta()); + CHECK_EQ(image_header.GetImageBegin(), target_base); + // Fix up dex cache DexFile pointers. + auto* dex_caches = image_header.GetImageRoot(ImageHeader::kDexCaches)-> + AsObjectArray<mirror::DexCache>(); + for (int32_t i = 0, count = dex_caches->GetLength(); i < count; ++i) { + mirror::DexCache* dex_cache = dex_caches->Get(i); + // Fix up dex cache pointers. + GcRoot<mirror::String>* strings = dex_cache->GetStrings(); + if (strings != nullptr) { + GcRoot<mirror::String>* new_strings = fixup_adapter.ForwardObject(strings); + if (strings != new_strings) { + dex_cache->SetFieldPtr64<false>(mirror::DexCache::StringsOffset(), new_strings); + } + dex_cache->FixupStrings(new_strings, fixup_adapter); + } + GcRoot<mirror::Class>* types = dex_cache->GetResolvedTypes(); + if (types != nullptr) { + GcRoot<mirror::Class>* new_types = fixup_adapter.ForwardObject(types); + if (types != new_types) { + dex_cache->SetFieldPtr64<false>(mirror::DexCache::ResolvedTypesOffset(), new_types); + } + dex_cache->FixupResolvedTypes(new_types, fixup_adapter); + } + ArtMethod** methods = dex_cache->GetResolvedMethods(); + if (methods != nullptr) { + ArtMethod** new_methods = fixup_adapter.ForwardObject(methods); + if (methods != new_methods) { + dex_cache->SetFieldPtr64<false>(mirror::DexCache::ResolvedMethodsOffset(), new_methods); + } + for (size_t j = 0, num = dex_cache->NumResolvedMethods(); j != num; ++j) { + ArtMethod* orig = mirror::DexCache::GetElementPtrSize(new_methods, j, sizeof(void*)); + ArtMethod* copy = fixup_adapter.ForwardObject(orig); + if (orig != copy) { + mirror::DexCache::SetElementPtrSize(new_methods, j, copy, sizeof(void*)); + } + } + } + ArtField** fields = dex_cache->GetResolvedFields(); + if (fields != nullptr) { + ArtField** new_fields = fixup_adapter.ForwardObject(fields); + if (fields != new_fields) { + dex_cache->SetFieldPtr64<false>(mirror::DexCache::ResolvedFieldsOffset(), new_fields); + } + for (size_t j = 0, num = dex_cache->NumResolvedFields(); j != num; ++j) { + ArtField* orig = mirror::DexCache::GetElementPtrSize(new_fields, j, sizeof(void*)); + ArtField* copy = fixup_adapter.ForwardObject(orig); + if (orig != copy) { + mirror::DexCache::SetElementPtrSize(new_fields, j, copy, sizeof(void*)); + } + } + } + } + } + { + // Only touches objects in the app image, no need for mutator lock. + TimingLogger::ScopedTiming timing("Fixup methods", &logger); + FixupArtMethodVisitor method_visitor(fixup_image, boot_image, boot_oat, app_image, app_oat); + image_header.GetImageSection(ImageHeader::kSectionArtMethods).VisitPackedArtMethods( + &method_visitor, + target_base, + sizeof(void*)); + } + if (fixup_image) { + { + // Only touches objects in the app image, no need for mutator lock. + TimingLogger::ScopedTiming timing("Fixup fields", &logger); + FixupArtFieldVisitor field_visitor(boot_image, boot_oat, app_image, app_oat); + image_header.GetImageSection(ImageHeader::kSectionArtFields).VisitPackedArtFields( + &field_visitor, + target_base); + } + // In the app image case, the image methods are actually in the boot image. 
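
The per-dex-cache fixups above all follow the same pattern: forward the native array pointer itself, then forward each element, writing back only when a value actually changed. A simplified template version, ignoring the pointer-size handling that the real GetElementPtrSize/SetElementPtrSize accessors perform:

#include <cstddef>

// forward is any callable that maps an old pointer to its relocated address and works
// for both T** and T* (e.g. a generic lambda, or an adapter with a templated
// operator() like FixupObjectAdapter above).
template <typename T, typename Forward>
T** FixupPointerArray(T** array, size_t count, Forward&& forward) {
  T** new_array = forward(array);  // Relocate the array pointer first.
  for (size_t i = 0; i < count; ++i) {
    T* orig = new_array[i];
    T* copy = forward(orig);
    if (orig != copy) {
      new_array[i] = copy;  // Keep pages clean when nothing moved.
    }
  }
  return new_array;
}
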
+ image_header.RelocateImageMethods(boot_image.Delta()); + const auto& class_table_section = image_header.GetImageSection(ImageHeader::kSectionClassTable); + if (class_table_section.Size() > 0u) { + // Note that we require that ReadFromMemory does not make an internal copy of the elements. + // This also relies on visit roots not doing any verification which could fail after we update + // the roots to be the image addresses. + ScopedObjectAccess soa(Thread::Current()); + WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_); + ClassTable temp_table; + temp_table.ReadFromMemory(target_base + class_table_section.Offset()); + FixupRootVisitor root_visitor(boot_image, boot_oat, app_image, app_oat); + temp_table.VisitRoots(root_visitor); + } + } + if (VLOG_IS_ON(image)) { + logger.Dump(LOG(INFO)); + } + return true; +} + +ImageSpace* ImageSpace::Init(const char* image_filename, + const char* image_location, + bool validate_oat_file, + const OatFile* oat_file, + std::string* error_msg) { CHECK(image_filename != nullptr); CHECK(image_location != nullptr); - uint64_t start_time = 0; - if (VLOG_IS_ON(heap) || VLOG_IS_ON(startup)) { - start_time = NanoTime(); - LOG(INFO) << "ImageSpace::Init entering image_filename=" << image_filename; - } + TimingLogger logger(__FUNCTION__, true, false); + VLOG(image) << "ImageSpace::Init entering image_filename=" << image_filename; - std::unique_ptr<File> file(OS::OpenFileForReading(image_filename)); - if (file.get() == nullptr) { - *error_msg = StringPrintf("Failed to open '%s'", image_filename); - return nullptr; + std::unique_ptr<File> file; + { + TimingLogger::ScopedTiming timing("OpenImageFile", &logger); + file.reset(OS::OpenFileForReading(image_filename)); + if (file == nullptr) { + *error_msg = StringPrintf("Failed to open '%s'", image_filename); + return nullptr; + } } - ImageHeader image_header; - bool success = file->ReadFully(&image_header, sizeof(image_header)); - if (!success || !image_header.IsValid()) { - *error_msg = StringPrintf("Invalid image header in '%s'", image_filename); - return nullptr; + ImageHeader temp_image_header; + ImageHeader* image_header = &temp_image_header; + { + TimingLogger::ScopedTiming timing("ReadImageHeader", &logger); + bool success = file->ReadFully(image_header, sizeof(*image_header)); + if (!success || !image_header->IsValid()) { + *error_msg = StringPrintf("Invalid image header in '%s'", image_filename); + return nullptr; + } } // Check that the file is larger or equal to the header size + data size. const uint64_t image_file_size = static_cast<uint64_t>(file->GetLength()); - if (image_file_size < sizeof(ImageHeader) + image_header.GetDataSize()) { + if (image_file_size < sizeof(ImageHeader) + image_header->GetDataSize()) { *error_msg = StringPrintf("Image file truncated: %" PRIu64 " vs. 
%" PRIu64 ".", image_file_size, - image_header.GetDataSize()); + sizeof(ImageHeader) + image_header->GetDataSize()); return nullptr; } @@ -704,17 +1169,17 @@ ImageSpace* ImageSpace::Init(const char* image_filename, const char* image_locat LOG(INFO) << "Dumping image sections"; for (size_t i = 0; i < ImageHeader::kSectionCount; ++i) { const auto section_idx = static_cast<ImageHeader::ImageSections>(i); - auto& section = image_header.GetImageSection(section_idx); + auto& section = image_header->GetImageSection(section_idx); LOG(INFO) << section_idx << " start=" - << reinterpret_cast<void*>(image_header.GetImageBegin() + section.Offset()) << " " - << section; + << reinterpret_cast<void*>(image_header->GetImageBegin() + section.Offset()) << " " + << section; } } - const auto& bitmap_section = image_header.GetImageSection(ImageHeader::kSectionImageBitmap); + const auto& bitmap_section = image_header->GetImageSection(ImageHeader::kSectionImageBitmap); // The location we want to map from is the first aligned page after the end of the stored // (possibly compressed) data. - const size_t image_bitmap_offset = RoundUp(sizeof(image_header) + image_header.GetDataSize(), + const size_t image_bitmap_offset = RoundUp(sizeof(ImageHeader) + image_header->GetDataSize(), kPageSize); const size_t end_of_bitmap = image_bitmap_offset + bitmap_section.Size(); if (end_of_bitmap != image_file_size) { @@ -724,67 +1189,84 @@ ImageSpace* ImageSpace::Init(const char* image_filename, const char* image_locat return nullptr; } + // The preferred address to map the image, null specifies any address. If we manage to map the + // image at the image begin, the amount of fixup work required is minimized. + std::vector<uint8_t*> addresses(1, image_header->GetImageBegin()); + if (image_header->IsPic()) { + // Can also map at a random low_4gb address since we can relocate in-place. + addresses.push_back(nullptr); + } + // Note: The image header is part of the image due to mmap page alignment required of offset. std::unique_ptr<MemMap> map; - if (image_header.GetStorageMode() == ImageHeader::kStorageModeUncompressed) { - map.reset(MemMap::MapFileAtAddress(image_header.GetImageBegin(), - image_header.GetImageSize(), - PROT_READ | PROT_WRITE, - MAP_PRIVATE, - file->Fd(), - 0, - /*low_4gb*/false, - /*reuse*/false, - image_filename, - error_msg)); - } else { - // Reserve output and decompress into it. - map.reset(MemMap::MapAnonymous(image_location, - image_header.GetImageBegin(), - image_header.GetImageSize(), - PROT_READ | PROT_WRITE, - /*low_4gb*/false, - /*reuse*/false, - error_msg)); - if (map != nullptr) { - const size_t stored_size = image_header.GetDataSize(); - const size_t write_offset = sizeof(image_header); // Skip the header. 
- std::unique_ptr<MemMap> temp_map(MemMap::MapFile(sizeof(ImageHeader) + stored_size, - PROT_READ, - MAP_PRIVATE, - file->Fd(), - /*offset*/0, - /*low_4gb*/false, - image_filename, - error_msg)); - if (temp_map == nullptr) { - DCHECK(!error_msg->empty()); - return nullptr; - } - memcpy(map->Begin(), &image_header, sizeof(image_header)); - const uint64_t start = NanoTime(); - const size_t decompressed_size = LZ4_decompress_safe( - reinterpret_cast<char*>(temp_map->Begin()) + sizeof(ImageHeader), - reinterpret_cast<char*>(map->Begin()) + write_offset, - stored_size, - map->Size()); - // TODO: VLOG(image) - VLOG(class_linker) << "Decompressing image took " << PrettyDuration(NanoTime() - start); - if (decompressed_size + sizeof(ImageHeader) != image_header.GetImageSize()) { - *error_msg = StringPrintf("Decompressed size does not match expected image size %zu vs %zu", - decompressed_size + sizeof(ImageHeader), - image_header.GetImageSize()); - return nullptr; + std::string temp_error_msg; + for (uint8_t* address : addresses) { + TimingLogger::ScopedTiming timing("MapImageFile", &logger); + // Only care about the error message for the last address in addresses. We want to avoid the + // overhead of printing the process maps if we can relocate. + std::string* out_error_msg = (address == addresses.back()) ? &temp_error_msg : nullptr; + if (image_header->GetStorageMode() == ImageHeader::kStorageModeUncompressed) { + map.reset(MemMap::MapFileAtAddress(address, + image_header->GetImageSize(), + PROT_READ | PROT_WRITE, + MAP_PRIVATE, + file->Fd(), + 0, + /*low_4gb*/true, + /*reuse*/false, + image_filename, + /*out*/out_error_msg)); + } else { + // Reserve output and decompress into it. + map.reset(MemMap::MapAnonymous(image_location, + address, + image_header->GetImageSize(), + PROT_READ | PROT_WRITE, + /*low_4gb*/true, + /*reuse*/false, + out_error_msg)); + if (map != nullptr) { + const size_t stored_size = image_header->GetDataSize(); + const size_t write_offset = sizeof(ImageHeader); // Skip the header. 
+ std::unique_ptr<MemMap> temp_map(MemMap::MapFile(sizeof(ImageHeader) + stored_size, + PROT_READ, + MAP_PRIVATE, + file->Fd(), + /*offset*/0, + /*low_4gb*/false, + image_filename, + out_error_msg)); + if (temp_map == nullptr) { + DCHECK(!out_error_msg->empty()); + return nullptr; + } + memcpy(map->Begin(), image_header, sizeof(ImageHeader)); + const uint64_t start = NanoTime(); + const size_t decompressed_size = LZ4_decompress_safe( + reinterpret_cast<char*>(temp_map->Begin()) + sizeof(ImageHeader), + reinterpret_cast<char*>(map->Begin()) + write_offset, + stored_size, + map->Size()); + VLOG(image) << "Decompressing image took " << PrettyDuration(NanoTime() - start); + if (decompressed_size + sizeof(ImageHeader) != image_header->GetImageSize()) { + *error_msg = StringPrintf("Decompressed size does not match expected image size %zu vs %zu", + decompressed_size + sizeof(ImageHeader), + image_header->GetImageSize()); + return nullptr; + } } } + if (map != nullptr) { + break; + } } if (map == nullptr) { - DCHECK(!error_msg->empty()); + DCHECK(!temp_error_msg.empty()); + *error_msg = temp_error_msg; return nullptr; } - CHECK_EQ(image_header.GetImageBegin(), map->Begin()); - DCHECK_EQ(0, memcmp(&image_header, map->Begin(), sizeof(ImageHeader))); + DCHECK_EQ(0, memcmp(image_header, map->Begin(), sizeof(ImageHeader))); std::unique_ptr<MemMap> image_bitmap_map(MemMap::MapFileAtAddress(nullptr, bitmap_section.Size(), @@ -799,25 +1281,42 @@ ImageSpace* ImageSpace::Init(const char* image_filename, const char* image_locat *error_msg = StringPrintf("Failed to map image bitmap: %s", error_msg->c_str()); return nullptr; } - uint32_t bitmap_index = bitmap_index_.FetchAndAddSequentiallyConsistent(1); - std::string bitmap_name(StringPrintf("imagespace %s live-bitmap %u", image_filename, + // Loaded the map, use the image header from the file now in case we patch it with + // RelocateInPlace. + image_header = reinterpret_cast<ImageHeader*>(map->Begin()); + const uint32_t bitmap_index = bitmap_index_.FetchAndAddSequentiallyConsistent(1); + std::string bitmap_name(StringPrintf("imagespace %s live-bitmap %u", + image_filename, bitmap_index)); // Bitmap only needs to cover until the end of the mirror objects section. - const ImageSection& image_objects = image_header.GetImageSection(ImageHeader::kSectionObjects); - std::unique_ptr<accounting::ContinuousSpaceBitmap> bitmap( + const ImageSection& image_objects = image_header->GetImageSection(ImageHeader::kSectionObjects); + // We only want the mirror object, not the ArtFields and ArtMethods. + uint8_t* const image_end = map->Begin() + image_objects.End(); + std::unique_ptr<accounting::ContinuousSpaceBitmap> bitmap; + { + TimingLogger::ScopedTiming timing("CreateImageBitmap", &logger); + bitmap.reset( accounting::ContinuousSpaceBitmap::CreateFromMemMap( bitmap_name, image_bitmap_map.release(), reinterpret_cast<uint8_t*>(map->Begin()), image_objects.End())); - if (bitmap == nullptr) { - *error_msg = StringPrintf("Could not create bitmap '%s'", bitmap_name.c_str()); - return nullptr; + if (bitmap == nullptr) { + *error_msg = StringPrintf("Could not create bitmap '%s'", bitmap_name.c_str()); + return nullptr; + } + } + { + TimingLogger::ScopedTiming timing("RelocateImage", &logger); + if (!RelocateInPlace(*image_header, + map->Begin(), + bitmap.get(), + oat_file, + error_msg)) { + return nullptr; + } } - // We only want the mirror object, not the ArtFields and ArtMethods. 
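
The address-selection loop in Init above can be sketched as follows. TryMapFn is a hypothetical stand-in for the MemMap::MapFileAtAddress/MapAnonymous calls and is assumed to return null on failure; the header-recorded address is tried first so that a lucky mapping needs no relocation, and a PIC image may fall back to an arbitrary low-4GB address and be relocated in place afterwards.

#include <cstdint>
#include <vector>

using TryMapFn = uint8_t* (*)(uint8_t* preferred_address);

// Illustrative only: returns the mapped base, or null if every candidate failed.
uint8_t* MapImage(uint8_t* image_begin, bool is_pic, TryMapFn try_map) {
  std::vector<uint8_t*> addresses(1, image_begin);
  if (is_pic) {
    addresses.push_back(nullptr);  // nullptr means "any address the kernel picks".
  }
  for (uint8_t* address : addresses) {
    if (uint8_t* mapped = try_map(address)) {
      return mapped;
    }
  }
  return nullptr;
}
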
- uint8_t* const image_end = - map->Begin() + image_header.GetImageSection(ImageHeader::kSectionObjects).End(); std::unique_ptr<ImageSpace> space(new ImageSpace(image_filename, image_location, map.release(), @@ -829,38 +1328,61 @@ ImageSpace* ImageSpace::Init(const char* image_filename, const char* image_locat // and ArtField::java_lang_reflect_ArtField_, which are used from // Object::SizeOf() which VerifyImageAllocations() calls, are not // set yet at this point. - - space->oat_file_.reset(space->OpenOatFile(image_filename, error_msg)); - if (space->oat_file_.get() == nullptr) { - DCHECK(!error_msg->empty()); - return nullptr; + if (oat_file == nullptr) { + TimingLogger::ScopedTiming timing("OpenOatFile", &logger); + space->oat_file_.reset(space->OpenOatFile(image_filename, error_msg)); + if (space->oat_file_ == nullptr) { + DCHECK(!error_msg->empty()); + return nullptr; + } + space->oat_file_non_owned_ = space->oat_file_.get(); + } else { + space->oat_file_non_owned_ = oat_file; } - space->oat_file_non_owned_ = space->oat_file_.get(); - if (validate_oat_file && !space->ValidateOatFile(error_msg)) { - DCHECK(!error_msg->empty()); - return nullptr; + if (validate_oat_file) { + TimingLogger::ScopedTiming timing("ValidateOatFile", &logger); + if (!space->ValidateOatFile(error_msg)) { + DCHECK(!error_msg->empty()); + return nullptr; + } } Runtime* runtime = Runtime::Current(); - runtime->SetInstructionSet(space->oat_file_->GetOatHeader().GetInstructionSet()); - if (!runtime->HasResolutionMethod()) { - runtime->SetResolutionMethod(image_header.GetImageMethod(ImageHeader::kResolutionMethod)); - runtime->SetImtConflictMethod(image_header.GetImageMethod(ImageHeader::kImtConflictMethod)); + // If oat_file is null, then it is the boot image space. Use oat_file_non_owned_ from the space + // to set the runtime methods. 
+ CHECK_EQ(oat_file != nullptr, image_header->IsAppImage()); + if (image_header->IsAppImage()) { + CHECK_EQ(runtime->GetResolutionMethod(), + image_header->GetImageMethod(ImageHeader::kResolutionMethod)); + CHECK_EQ(runtime->GetImtConflictMethod(), + image_header->GetImageMethod(ImageHeader::kImtConflictMethod)); + CHECK_EQ(runtime->GetImtUnimplementedMethod(), + image_header->GetImageMethod(ImageHeader::kImtUnimplementedMethod)); + CHECK_EQ(runtime->GetCalleeSaveMethod(Runtime::kSaveAll), + image_header->GetImageMethod(ImageHeader::kCalleeSaveMethod)); + CHECK_EQ(runtime->GetCalleeSaveMethod(Runtime::kRefsOnly), + image_header->GetImageMethod(ImageHeader::kRefsOnlySaveMethod)); + CHECK_EQ(runtime->GetCalleeSaveMethod(Runtime::kRefsAndArgs), + image_header->GetImageMethod(ImageHeader::kRefsAndArgsSaveMethod)); + } else if (!runtime->HasResolutionMethod()) { + runtime->SetInstructionSet(space->oat_file_non_owned_->GetOatHeader().GetInstructionSet()); + runtime->SetResolutionMethod(image_header->GetImageMethod(ImageHeader::kResolutionMethod)); + runtime->SetImtConflictMethod(image_header->GetImageMethod(ImageHeader::kImtConflictMethod)); runtime->SetImtUnimplementedMethod( - image_header.GetImageMethod(ImageHeader::kImtUnimplementedMethod)); + image_header->GetImageMethod(ImageHeader::kImtUnimplementedMethod)); runtime->SetCalleeSaveMethod( - image_header.GetImageMethod(ImageHeader::kCalleeSaveMethod), Runtime::kSaveAll); + image_header->GetImageMethod(ImageHeader::kCalleeSaveMethod), Runtime::kSaveAll); runtime->SetCalleeSaveMethod( - image_header.GetImageMethod(ImageHeader::kRefsOnlySaveMethod), Runtime::kRefsOnly); + image_header->GetImageMethod(ImageHeader::kRefsOnlySaveMethod), Runtime::kRefsOnly); runtime->SetCalleeSaveMethod( - image_header.GetImageMethod(ImageHeader::kRefsAndArgsSaveMethod), Runtime::kRefsAndArgs); + image_header->GetImageMethod(ImageHeader::kRefsAndArgsSaveMethod), Runtime::kRefsAndArgs); } - if (VLOG_IS_ON(heap) || VLOG_IS_ON(startup)) { - LOG(INFO) << "ImageSpace::Init exiting (" << PrettyDuration(NanoTime() - start_time) - << ") " << *space.get(); + VLOG(image) << "ImageSpace::Init exiting " << *space.get(); + if (VLOG_IS_ON(image)) { + logger.Dump(LOG(INFO)); } return space.release(); } @@ -1002,6 +1524,16 @@ void ImageSpace::CreateMultiImageLocations(const std::string& input_image_file_n } } +ImageSpace* ImageSpace::CreateFromAppImage(const char* image, + const OatFile* oat_file, + std::string* error_msg) { + return gc::space::ImageSpace::Init(image, + image, + /*validate_oat_file*/false, + oat_file, + /*out*/error_msg); +} + } // namespace space } // namespace gc } // namespace art diff --git a/runtime/gc/space/image_space.h b/runtime/gc/space/image_space.h index 9c8e8b2014..f2f416377e 100644 --- a/runtime/gc/space/image_space.h +++ b/runtime/gc/space/image_space.h @@ -35,7 +35,7 @@ class ImageSpace : public MemMapSpace { return kSpaceTypeImageSpace; } - // Create a Space from an image file for a specified instruction + // Create a boot image space from an image file for a specified instruction // set. Cannot be used for future allocation or collected. // // Create also opens the OatFile associated with the image file so @@ -43,10 +43,16 @@ class ImageSpace : public MemMapSpace { // creation of the alloc space. The ReleaseOatFile will later be // used to transfer ownership of the OatFile to the ClassLinker when // it is initialized. 
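
A rough model of the image-method handling above; RuntimeModel and AdoptImageMethods are illustrative names, not the real ART API. A boot image installs its image methods into the runtime, while an app image's image methods live in the boot image and therefore must already agree with what the runtime holds.

#include <cassert>

struct RuntimeModel {
  void* resolution_method = nullptr;  // Stand-in for the runtime-owned image methods.
};

// Sketch of the branch above: app images only verify, boot images install (first one
// wins, matching the HasResolutionMethod() guard).
void AdoptImageMethods(RuntimeModel& runtime, void* image_resolution_method, bool is_app_image) {
  if (is_app_image) {
    assert(runtime.resolution_method == image_resolution_method);
  } else if (runtime.resolution_method == nullptr) {
    runtime.resolution_method = image_resolution_method;
  }
}
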
- static ImageSpace* Create(const char* image, - InstructionSet image_isa, - bool secondary_image, - std::string* error_msg) + static ImageSpace* CreateBootImage(const char* image, + InstructionSet image_isa, + bool secondary_image, + std::string* error_msg) + SHARED_REQUIRES(Locks::mutator_lock_); + + // Try to open an existing app image space. + static ImageSpace* CreateFromAppImage(const char* image, + const OatFile* oat_file, + std::string* error_msg) SHARED_REQUIRES(Locks::mutator_lock_); // Reads the image header from the specified image location for the @@ -144,15 +150,17 @@ class ImageSpace : public MemMapSpace { } protected: - // Tries to initialize an ImageSpace from the given image path, - // returning null on error. + // Tries to initialize an ImageSpace from the given image path, returning null on error. // - // If validate_oat_file is false (for /system), do not verify that - // image's OatFile is up-to-date relative to its DexFile - // inputs. Otherwise (for /data), validate the inputs and generate - // the OatFile in /data/dalvik-cache if necessary. - static ImageSpace* Init(const char* image_filename, const char* image_location, - bool validate_oat_file, std::string* error_msg) + // If validate_oat_file is false (for /system), do not verify that image's OatFile is up-to-date + // relative to its DexFile inputs. Otherwise (for /data), validate the inputs and generate the + // OatFile in /data/dalvik-cache if necessary. If the oat_file is null, it uses the oat file from + // the image. + static ImageSpace* Init(const char* image_filename, + const char* image_location, + bool validate_oat_file, + const OatFile* oat_file, + std::string* error_msg) SHARED_REQUIRES(Locks::mutator_lock_); OatFile* OpenOatFile(const char* image, std::string* error_msg) const diff --git a/runtime/image.cc b/runtime/image.cc index 4254d949d9..2fed4d3bd3 100644 --- a/runtime/image.cc +++ b/runtime/image.cc @@ -35,8 +35,13 @@ ImageHeader::ImageHeader(uint32_t image_begin, uint32_t oat_data_begin, uint32_t oat_data_end, uint32_t oat_file_end, + uint32_t boot_image_begin, + uint32_t boot_image_size, + uint32_t boot_oat_begin, + uint32_t boot_oat_size, uint32_t pointer_size, bool compile_pic, + bool is_pic, StorageMode storage_mode, size_t data_size) : image_begin_(image_begin), @@ -46,10 +51,15 @@ ImageHeader::ImageHeader(uint32_t image_begin, oat_data_begin_(oat_data_begin), oat_data_end_(oat_data_end), oat_file_end_(oat_file_end), + boot_image_begin_(boot_image_begin), + boot_image_size_(boot_image_size), + boot_oat_begin_(boot_oat_begin), + boot_oat_size_(boot_oat_size), patch_delta_(0), image_roots_(image_roots), pointer_size_(pointer_size), compile_pic_(compile_pic), + is_pic_(is_pic), storage_mode_(storage_mode), data_size_(data_size) { CHECK_EQ(image_begin, RoundUp(image_begin, kPageSize)); @@ -67,13 +77,21 @@ ImageHeader::ImageHeader(uint32_t image_begin, void ImageHeader::RelocateImage(off_t delta) { CHECK_ALIGNED(delta, kPageSize) << " patch delta must be page aligned"; - image_begin_ += delta; oat_file_begin_ += delta; oat_data_begin_ += delta; oat_data_end_ += delta; oat_file_end_ += delta; - image_roots_ += delta; patch_delta_ += delta; + RelocateImageObjects(delta); + RelocateImageMethods(delta); +} + +void ImageHeader::RelocateImageObjects(off_t delta) { + image_begin_ += delta; + image_roots_ += delta; +} + +void ImageHeader::RelocateImageMethods(off_t delta) { for (size_t i = 0; i < kImageMethodsCount; ++i) { image_methods_[i] += delta; } diff --git a/runtime/image.h b/runtime/image.h 
index 7418f660aa..b3f177bc8f 100644 --- a/runtime/image.h +++ b/runtime/image.h @@ -93,10 +93,15 @@ class PACKED(4) ImageHeader { oat_data_begin_(0U), oat_data_end_(0U), oat_file_end_(0U), + boot_image_begin_(0U), + boot_image_size_(0U), + boot_oat_begin_(0U), + boot_oat_size_(0U), patch_delta_(0), image_roots_(0U), pointer_size_(0U), compile_pic_(0), + is_pic_(0), storage_mode_(kDefaultStorageMode), data_size_(0) {} @@ -109,8 +114,13 @@ class PACKED(4) ImageHeader { uint32_t oat_data_begin, uint32_t oat_data_end, uint32_t oat_file_end, + uint32_t boot_image_begin, + uint32_t boot_image_size, + uint32_t boot_oat_begin, + uint32_t boot_oat_size, uint32_t pointer_size, bool compile_pic, + bool is_pic, StorageMode storage_mode, size_t data_size); @@ -208,11 +218,33 @@ class PACKED(4) ImageHeader { SHARED_REQUIRES(Locks::mutator_lock_); void RelocateImage(off_t delta); + void RelocateImageMethods(off_t delta); + void RelocateImageObjects(off_t delta); bool CompilePic() const { return compile_pic_ != 0; } + bool IsPic() const { + return is_pic_ != 0; + } + + uint32_t GetBootImageBegin() const { + return boot_image_begin_; + } + + uint32_t GetBootImageSize() const { + return boot_image_size_; + } + + uint32_t GetBootOatBegin() const { + return boot_oat_begin_; + } + + uint32_t GetBootOatSize() const { + return boot_oat_size_; + } + StorageMode GetStorageMode() const { return storage_mode_; } @@ -221,6 +253,12 @@ class PACKED(4) ImageHeader { return data_size_; } + bool IsAppImage() const { + // App images currently require a boot image, if the size is non zero then it is an app image + // header. + return boot_image_size_ != 0u; + } + private: static const uint8_t kImageMagic[4]; static const uint8_t kImageVersion[4]; @@ -250,6 +288,16 @@ class PACKED(4) ImageHeader { // .so files. Used for positioning a following alloc spaces. uint32_t oat_file_end_; + // Boot image begin and end (app image headers only). + uint32_t boot_image_begin_; + uint32_t boot_image_size_; + + // Boot oat begin and end (app image headers only). + uint32_t boot_oat_begin_; + uint32_t boot_oat_size_; + + // TODO: We should probably insert a boot image checksum for app images. + // The total delta that this image has been patched. int32_t patch_delta_; @@ -262,10 +310,15 @@ class PACKED(4) ImageHeader { // Boolean (0 or 1) to denote if the image was compiled with --compile-pic option const uint32_t compile_pic_; + // Boolean (0 or 1) to denote if the image can be mapped at a random address, this only refers to + // the .art file. Currently, app oat files do not depend on their app image. There are no pointers + // from the app oat code to the app image. + const uint32_t is_pic_; + // Image section sizes/offsets correspond to the uncompressed form. ImageSection sections_[kSectionCount]; - // Image methods. + // Image methods, may be inside of the boot image for app images. uint64_t image_methods_[kImageMethodsCount]; // Storage method for the image, the image may be compressed. diff --git a/runtime/intern_table.cc b/runtime/intern_table.cc index 015bf98e38..96854dad0e 100644 --- a/runtime/intern_table.cc +++ b/runtime/intern_table.cc @@ -86,14 +86,24 @@ void InternTable::VisitRoots(RootVisitor* visitor, VisitRootFlags flags) { // Note: we deliberately don't visit the weak_interns_ table and the immutable image roots. 
} -mirror::String* InternTable::LookupStrong(mirror::String* s) { - return strong_interns_.Find(s); +mirror::String* InternTable::LookupWeak(Thread* self, mirror::String* s) { + MutexLock mu(self, *Locks::intern_table_lock_); + return LookupWeakLocked(s); +} + +mirror::String* InternTable::LookupStrong(Thread* self, mirror::String* s) { + MutexLock mu(self, *Locks::intern_table_lock_); + return LookupStrongLocked(s); } -mirror::String* InternTable::LookupWeak(mirror::String* s) { +mirror::String* InternTable::LookupWeakLocked(mirror::String* s) { return weak_interns_.Find(s); } +mirror::String* InternTable::LookupStrongLocked(mirror::String* s) { + return strong_interns_.Find(s); +} + void InternTable::AddNewTable() { MutexLock mu(Thread::Current(), *Locks::intern_table_lock_); weak_interns_.AddNewTable(); @@ -169,7 +179,7 @@ void InternTable::AddImagesStringsToTable(const std::vector<gc::space::ImageSpac for (size_t j = 0; j < num_strings; ++j) { mirror::String* image_string = dex_cache->GetResolvedString(j); if (image_string != nullptr) { - mirror::String* found = LookupStrong(image_string); + mirror::String* found = LookupStrongLocked(image_string); if (found == nullptr) { InsertStrong(image_string); } else { @@ -250,7 +260,7 @@ mirror::String* InternTable::Insert(mirror::String* s, bool is_strong, bool hold } } // Check the strong table for a match. - mirror::String* strong = LookupStrong(s); + mirror::String* strong = LookupStrongLocked(s); if (strong != nullptr) { return strong; } @@ -272,7 +282,7 @@ mirror::String* InternTable::Insert(mirror::String* s, bool is_strong, bool hold CHECK(self->GetWeakRefAccessEnabled()); } // There is no match in the strong table, check the weak table. - mirror::String* weak = LookupWeak(s); + mirror::String* weak = LookupWeakLocked(s); if (weak != nullptr) { if (is_strong) { // A match was found in the weak table. Promote to the strong table. @@ -317,8 +327,7 @@ mirror::String* InternTable::InternWeak(mirror::String* s) { } bool InternTable::ContainsWeak(mirror::String* s) { - MutexLock mu(Thread::Current(), *Locks::intern_table_lock_); - return LookupWeak(s) == s; + return LookupWeak(Thread::Current(), s) == s; } void InternTable::SweepInternTableWeaks(IsMarkedVisitor* visitor) { diff --git a/runtime/intern_table.h b/runtime/intern_table.h index 2b2176efe1..274f5ade5e 100644 --- a/runtime/intern_table.h +++ b/runtime/intern_table.h @@ -84,10 +84,22 @@ class InternTable { bool ContainsWeak(mirror::String* s) SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!Locks::intern_table_lock_); + // Lookup a strong intern, returns null if not found. + mirror::String* LookupStrong(Thread* self, mirror::String* s) + REQUIRES(!Locks::intern_table_lock_) + SHARED_REQUIRES(Locks::mutator_lock_); + + // Lookup a weak intern, returns null if not found. + mirror::String* LookupWeak(Thread* self, mirror::String* s) + REQUIRES(!Locks::intern_table_lock_) + SHARED_REQUIRES(Locks::mutator_lock_); + // Total number of interned strings. size_t Size() const REQUIRES(!Locks::intern_table_lock_); + // Total number of weakly live interned strings. size_t StrongSize() const REQUIRES(!Locks::intern_table_lock_); + // Total number of strongly live interned strings. 
size_t WeakSize() const REQUIRES(!Locks::intern_table_lock_); @@ -186,9 +198,9 @@ class InternTable { mirror::String* Insert(mirror::String* s, bool is_strong, bool holding_locks) REQUIRES(!Locks::intern_table_lock_) SHARED_REQUIRES(Locks::mutator_lock_); - mirror::String* LookupStrong(mirror::String* s) + mirror::String* LookupStrongLocked(mirror::String* s) SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_); - mirror::String* LookupWeak(mirror::String* s) + mirror::String* LookupWeakLocked(mirror::String* s) SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_); mirror::String* InsertStrong(mirror::String* s) SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_); diff --git a/runtime/mirror/class-inl.h b/runtime/mirror/class-inl.h index 53118e07e1..75a3f1aa05 100644 --- a/runtime/mirror/class-inl.h +++ b/runtime/mirror/class-inl.h @@ -47,11 +47,15 @@ inline uint32_t Class::GetObjectSize() { return GetField32(ObjectSizeOffset()); } +template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption> inline Class* Class::GetSuperClass() { // Can only get super class for loaded classes (hack for when runtime is // initializing) - DCHECK(IsLoaded() || IsErroneous() || !Runtime::Current()->IsStarted()) << IsLoaded(); - return GetFieldObject<Class>(OFFSET_OF_OBJECT_MEMBER(Class, super_class_)); + DCHECK(IsLoaded<kVerifyFlags>() || + IsErroneous<kVerifyFlags>() || + !Runtime::Current()->IsStarted()) << IsLoaded(); + return GetFieldObject<Class, kVerifyFlags, kReadBarrierOption>( + OFFSET_OF_OBJECT_MEMBER(Class, super_class_)); } inline ClassLoader* Class::GetClassLoader() { @@ -226,9 +230,12 @@ inline ArtMethod* Class::GetVirtualMethodUnchecked(size_t i, size_t pointer_size return &GetVirtualMethodsSliceUnchecked(pointer_size).At(i); } +template<VerifyObjectFlags kVerifyFlags, + ReadBarrierOption kReadBarrierOption> inline PointerArray* Class::GetVTable() { - DCHECK(IsResolved() || IsErroneous()); - return GetFieldObject<PointerArray>(OFFSET_OF_OBJECT_MEMBER(Class, vtable_)); + DCHECK(IsResolved<kVerifyFlags>() || IsErroneous<kVerifyFlags>()); + return GetFieldObject<PointerArray, kVerifyFlags, kReadBarrierOption>( + OFFSET_OF_OBJECT_MEMBER(Class, vtable_)); } inline PointerArray* Class::GetVTableDuringLinking() { @@ -499,8 +506,11 @@ inline ArtMethod* Class::FindVirtualMethodForVirtualOrInterface(ArtMethod* metho return FindVirtualMethodForVirtual(method, pointer_size); } +template<VerifyObjectFlags kVerifyFlags, + ReadBarrierOption kReadBarrierOption> inline IfTable* Class::GetIfTable() { - return GetFieldObject<IfTable>(OFFSET_OF_OBJECT_MEMBER(Class, iftable_)); + return GetFieldObject<IfTable, kVerifyFlags, kReadBarrierOption>( + OFFSET_OF_OBJECT_MEMBER(Class, iftable_)); } inline int32_t Class::GetIfTableCount() { @@ -516,7 +526,7 @@ inline void Class::SetIfTable(IfTable* new_iftable) { } inline LengthPrefixedArray<ArtField>* Class::GetIFieldsPtr() { - DCHECK(IsLoaded() || IsErroneous()); + DCHECK(IsLoaded() || IsErroneous()) << GetStatus(); return GetFieldPtr<LengthPrefixedArray<ArtField>*>(OFFSET_OF_OBJECT_MEMBER(Class, ifields_)); } @@ -747,9 +757,12 @@ inline uint32_t Class::ComputeClassSize(bool has_embedded_tables, return size; } -template <typename Visitor> +template <bool kVisitNativeRoots, + VerifyObjectFlags kVerifyFlags, + ReadBarrierOption kReadBarrierOption, + typename Visitor> inline void Class::VisitReferences(mirror::Class* klass, const Visitor& visitor) { - VisitInstanceFieldsReferences(klass, 
visitor); + VisitInstanceFieldsReferences<kVerifyFlags, kReadBarrierOption>(klass, visitor); // Right after a class is allocated, but not yet loaded // (kStatusNotReady, see ClassLinker::LoadClass()), GC may find it // and scan it. IsTemp() may call Class::GetAccessFlags() but may @@ -757,14 +770,16 @@ inline void Class::VisitReferences(mirror::Class* klass, const Visitor& visitor) // status is kStatusNotReady. To avoid it, rely on IsResolved() // only. This is fine because a temp class never goes into the // kStatusResolved state. - if (IsResolved()) { + if (IsResolved<kVerifyFlags>()) { // Temp classes don't ever populate imt/vtable or static fields and they are not even // allocated with the right size for those. Also, unresolved classes don't have fields // linked yet. - VisitStaticFieldsReferences(this, visitor); + VisitStaticFieldsReferences<kVerifyFlags, kReadBarrierOption>(this, visitor); + } + if (kVisitNativeRoots) { + // Since this class is reachable, we must also visit the associated roots when we scan it. + VisitNativeRoots(visitor, Runtime::Current()->GetClassLinker()->GetImagePointerSize()); } - // Since this class is reachable, we must also visit the associated roots when we scan it. - VisitNativeRoots(visitor, Runtime::Current()->GetClassLinker()->GetImagePointerSize()); } template<ReadBarrierOption kReadBarrierOption> diff --git a/runtime/mirror/class.h b/runtime/mirror/class.h index 6b5ed910ba..3017820ea1 100644 --- a/runtime/mirror/class.h +++ b/runtime/mirror/class.h @@ -494,10 +494,11 @@ class MANAGED Class FINAL : public Object { (IsAbstract() && IsArrayClass()); } - template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags> + template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, + ReadBarrierOption kReadBarrierOption = kWithReadBarrier> bool IsObjectArrayClass() SHARED_REQUIRES(Locks::mutator_lock_) { - return GetComponentType<kVerifyFlags>() != nullptr && - !GetComponentType<kVerifyFlags>()->IsPrimitive(); + mirror::Class* const component_type = GetComponentType<kVerifyFlags, kReadBarrierOption>(); + return component_type != nullptr && !component_type->IsPrimitive(); } template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags> @@ -656,6 +657,8 @@ class MANAGED Class FINAL : public Object { // to themselves. Classes for primitive types may not assign to each other. ALWAYS_INLINE bool IsAssignableFrom(Class* src) SHARED_REQUIRES(Locks::mutator_lock_); + template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, + ReadBarrierOption kReadBarrierOption = kWithReadBarrier> ALWAYS_INLINE Class* GetSuperClass() SHARED_REQUIRES(Locks::mutator_lock_); // Get first common super class. It will never return null. 
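The class-inl.h and class.h hunks above all follow one pattern: the reference-visiting helpers gain kVerifyFlags / kReadBarrierOption (and, for VisitReferences, kVisitNativeRoots) as defaulted template parameters, so ordinary callers are unchanged while the image-writer paths can walk objects without read barriers and skip native roots. A minimal standalone sketch of that shape, using simplified stand-in types rather than the real ART classes:

#include <iostream>
#include <vector>

// Simplified stand-ins; only the names mirror the real ART enums and classes.
enum ReadBarrierOption { kWithReadBarrier, kWithoutReadBarrier };

struct Obj {
  std::vector<Obj*> fields;       // managed reference fields
  std::vector<int> native_roots;  // stand-in for ArtField/ArtMethod roots

  // Defaults preserve the old behaviour; special callers override them.
  template <bool kVisitNativeRoots = true,
            ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
            typename Visitor>
  void VisitReferences(const Visitor& visitor) {
    for (Obj* ref : fields) {
      visitor(ref, /*is_native_root=*/false);
    }
    if (kVisitNativeRoots) {
      for (int& root : native_roots) {
        visitor(&root, /*is_native_root=*/true);
      }
    }
    // In the real code kReadBarrierOption selects
    // GetFieldObject<..., kWithoutReadBarrier>(); here it only marks intent.
    (void)kReadBarrierOption;
  }
};

int main() {
  Obj o, child;
  o.fields.push_back(&child);
  o.native_roots = {1, 2, 3};
  int visited = 0;
  // Image-writer-style caller: no read barriers, skip native roots.
  o.VisitReferences</*kVisitNativeRoots=*/false, kWithoutReadBarrier>(
      [&](const void*, bool) { ++visited; });
  std::cout << "visited " << visited << " references\n";  // prints 1
}

Because every new parameter has a default, all pre-existing call sites stay source-compatible; only the new app-image code paths pass explicit arguments.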
@@ -791,6 +794,8 @@ class MANAGED Class FINAL : public Object { ArtMethod* GetVirtualMethodDuringLinking(size_t i, size_t pointer_size) SHARED_REQUIRES(Locks::mutator_lock_); + template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, + ReadBarrierOption kReadBarrierOption = kWithReadBarrier> ALWAYS_INLINE PointerArray* GetVTable() SHARED_REQUIRES(Locks::mutator_lock_); ALWAYS_INLINE PointerArray* GetVTableDuringLinking() SHARED_REQUIRES(Locks::mutator_lock_); @@ -941,6 +946,8 @@ class MANAGED Class FINAL : public Object { ALWAYS_INLINE int32_t GetIfTableCount() SHARED_REQUIRES(Locks::mutator_lock_); + template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, + ReadBarrierOption kReadBarrierOption = kWithReadBarrier> ALWAYS_INLINE IfTable* GetIfTable() SHARED_REQUIRES(Locks::mutator_lock_); ALWAYS_INLINE void SetIfTable(IfTable* new_iftable) SHARED_REQUIRES(Locks::mutator_lock_); @@ -1226,7 +1233,8 @@ class MANAGED Class FINAL : public Object { // Fix up all of the native pointers in the class by running them through the visitor. Only sets // the corresponding entry in dest if visitor(obj) != obj to prevent dirty memory. Dest should be - // initialized to a copy of *this to prevent issues. + // initialized to a copy of *this to prevent issues. Does not visit the ArtMethod and ArtField + // roots. template <typename Visitor> void FixupNativePointers(mirror::Class* dest, size_t pointer_size, const Visitor& visitor) SHARED_REQUIRES(Locks::mutator_lock_); @@ -1277,7 +1285,10 @@ class MANAGED Class FINAL : public Object { static MemberOffset EmbeddedImTableOffset(size_t pointer_size); static MemberOffset EmbeddedVTableOffset(size_t pointer_size); - template <typename Visitor> + template <bool kVisitNativeRoots, + VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, + ReadBarrierOption kReadBarrierOption = kWithReadBarrier, + typename Visitor> void VisitReferences(mirror::Class* klass, const Visitor& visitor) SHARED_REQUIRES(Locks::mutator_lock_); diff --git a/runtime/mirror/class_loader-inl.h b/runtime/mirror/class_loader-inl.h index e22ddd7e90..84fa80f023 100644 --- a/runtime/mirror/class_loader-inl.h +++ b/runtime/mirror/class_loader-inl.h @@ -25,15 +25,20 @@ namespace art { namespace mirror { -template <VerifyObjectFlags kVerifyFlags, typename Visitor> +template <bool kVisitClasses, + VerifyObjectFlags kVerifyFlags, + ReadBarrierOption kReadBarrierOption, + typename Visitor> inline void ClassLoader::VisitReferences(mirror::Class* klass, const Visitor& visitor) { // Visit instance fields first. - VisitInstanceFieldsReferences(klass, visitor); - // Visit classes loaded after. - ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_); - ClassTable* const class_table = GetClassTable(); - if (class_table != nullptr) { - class_table->VisitRoots(visitor); + VisitInstanceFieldsReferences<kVerifyFlags, kReadBarrierOption>(klass, visitor); + if (kVisitClasses) { + // Visit classes loaded after. + ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_); + ClassTable* const class_table = GetClassTable(); + if (class_table != nullptr) { + class_table->VisitRoots(visitor); + } } } diff --git a/runtime/mirror/class_loader.h b/runtime/mirror/class_loader.h index c2a65d62e2..1957e13e36 100644 --- a/runtime/mirror/class_loader.h +++ b/runtime/mirror/class_loader.h @@ -63,7 +63,10 @@ class MANAGED ClassLoader : public Object { private: // Visit instance fields of the class loader as well as its associated classes. 
// Null class loader is handled by ClassLinker::VisitClassRoots. - template <VerifyObjectFlags kVerifyFlags, typename Visitor> + template <bool kVisitClasses, + VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, + ReadBarrierOption kReadBarrierOption = kWithReadBarrier, + typename Visitor> void VisitReferences(mirror::Class* klass, const Visitor& visitor) SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!Locks::classlinker_classes_lock_); diff --git a/runtime/mirror/dex_cache-inl.h b/runtime/mirror/dex_cache-inl.h index 975af61ca8..2ecc9fb1a8 100644 --- a/runtime/mirror/dex_cache-inl.h +++ b/runtime/mirror/dex_cache-inl.h @@ -122,18 +122,23 @@ inline void DexCache::SetElementPtrSize(PtrType* ptr_array, } } -template <VerifyObjectFlags kVerifyFlags, typename Visitor> +template <bool kVisitNativeRoots, + VerifyObjectFlags kVerifyFlags, + ReadBarrierOption kReadBarrierOption, + typename Visitor> inline void DexCache::VisitReferences(mirror::Class* klass, const Visitor& visitor) { // Visit instance fields first. - VisitInstanceFieldsReferences(klass, visitor); + VisitInstanceFieldsReferences<kVerifyFlags, kReadBarrierOption>(klass, visitor); // Visit arrays after. - GcRoot<mirror::String>* strings = GetStrings(); - for (size_t i = 0, num_strings = NumStrings(); i != num_strings; ++i) { - visitor.VisitRootIfNonNull(strings[i].AddressWithoutBarrier()); - } - GcRoot<mirror::Class>* resolved_types = GetResolvedTypes(); - for (size_t i = 0, num_types = NumResolvedTypes(); i != num_types; ++i) { - visitor.VisitRootIfNonNull(resolved_types[i].AddressWithoutBarrier()); + if (kVisitNativeRoots) { + GcRoot<mirror::String>* strings = GetStrings(); + for (size_t i = 0, num_strings = NumStrings(); i != num_strings; ++i) { + visitor.VisitRootIfNonNull(strings[i].AddressWithoutBarrier()); + } + GcRoot<mirror::Class>* resolved_types = GetResolvedTypes(); + for (size_t i = 0, num_types = NumResolvedTypes(); i != num_types; ++i) { + visitor.VisitRootIfNonNull(resolved_types[i].AddressWithoutBarrier()); + } } } diff --git a/runtime/mirror/dex_cache.cc b/runtime/mirror/dex_cache.cc index 349a319992..7b058d0c92 100644 --- a/runtime/mirror/dex_cache.cc +++ b/runtime/mirror/dex_cache.cc @@ -51,10 +51,10 @@ void DexCache::Init(const DexFile* dex_file, SetDexFile(dex_file); SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(DexCache, location_), location); - SetField64<false>(StringsOffset(), reinterpret_cast<uintptr_t>(strings)); - SetField64<false>(ResolvedTypesOffset(), reinterpret_cast<uintptr_t>(resolved_types)); - SetField64<false>(ResolvedMethodsOffset(), reinterpret_cast<uintptr_t>(resolved_methods)); - SetField64<false>(ResolvedFieldsOffset(), reinterpret_cast<uintptr_t>(resolved_fields)); + SetStrings(strings); + SetResolvedTypes(resolved_types); + SetResolvedMethods(resolved_methods); + SetResolvedFields(resolved_fields); SetField32<false>(NumStringsOffset(), num_strings); SetField32<false>(NumResolvedTypesOffset(), num_resolved_types); SetField32<false>(NumResolvedMethodsOffset(), num_resolved_methods); diff --git a/runtime/mirror/dex_cache.h b/runtime/mirror/dex_cache.h index 32eb59540d..5ed061f6a4 100644 --- a/runtime/mirror/dex_cache.h +++ b/runtime/mirror/dex_cache.h @@ -137,18 +137,40 @@ class MANAGED DexCache FINAL : public Object { return GetFieldPtr<GcRoot<String>*>(StringsOffset()); } + void SetStrings(GcRoot<String>* strings) ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_) { + SetFieldPtr<false>(StringsOffset(), strings); + } + GcRoot<Class>* GetResolvedTypes() ALWAYS_INLINE 
SHARED_REQUIRES(Locks::mutator_lock_) { return GetFieldPtr<GcRoot<Class>*>(ResolvedTypesOffset()); } + void SetResolvedTypes(GcRoot<Class>* resolved_types) + ALWAYS_INLINE + SHARED_REQUIRES(Locks::mutator_lock_) { + SetFieldPtr<false>(ResolvedTypesOffset(), resolved_types); + } + ArtMethod** GetResolvedMethods() ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_) { return GetFieldPtr<ArtMethod**>(ResolvedMethodsOffset()); } + void SetResolvedMethods(ArtMethod** resolved_methods) + ALWAYS_INLINE + SHARED_REQUIRES(Locks::mutator_lock_) { + SetFieldPtr<false>(ResolvedMethodsOffset(), resolved_methods); + } + ArtField** GetResolvedFields() ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_) { return GetFieldPtr<ArtField**>(ResolvedFieldsOffset()); } + void SetResolvedFields(ArtField** resolved_fields) + ALWAYS_INLINE + SHARED_REQUIRES(Locks::mutator_lock_) { + SetFieldPtr<false>(ResolvedFieldsOffset(), resolved_fields); + } + size_t NumStrings() SHARED_REQUIRES(Locks::mutator_lock_) { return GetField32(NumStringsOffset()); } @@ -186,7 +208,10 @@ class MANAGED DexCache FINAL : public Object { private: // Visit instance fields of the dex cache as well as its associated arrays. - template <VerifyObjectFlags kVerifyFlags, typename Visitor> + template <bool kVisitNativeRoots, + VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, + ReadBarrierOption kReadBarrierOption = kWithReadBarrier, + typename Visitor> void VisitReferences(mirror::Class* klass, const Visitor& visitor) SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_); diff --git a/runtime/mirror/iftable.h b/runtime/mirror/iftable.h index b21ecdf6e8..605deac608 100644 --- a/runtime/mirror/iftable.h +++ b/runtime/mirror/iftable.h @@ -34,8 +34,11 @@ class MANAGED IfTable FINAL : public ObjectArray<Object> { ALWAYS_INLINE void SetInterface(int32_t i, Class* interface) SHARED_REQUIRES(Locks::mutator_lock_); + template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, + ReadBarrierOption kReadBarrierOption = kWithReadBarrier> PointerArray* GetMethodArray(int32_t i) SHARED_REQUIRES(Locks::mutator_lock_) { - auto* method_array = down_cast<PointerArray*>(Get((i * kMax) + kMethodArray)); + auto* method_array = down_cast<PointerArray*>(Get<kVerifyFlags, kReadBarrierOption>( + (i * kMax) + kMethodArray)); DCHECK(method_array != nullptr); return method_array; } diff --git a/runtime/mirror/object-inl.h b/runtime/mirror/object-inl.h index 460342807a..760de9ab40 100644 --- a/runtime/mirror/object-inl.h +++ b/runtime/mirror/object-inl.h @@ -1031,7 +1031,10 @@ inline bool Object::CasFieldStrongRelaxedObjectWithoutWriteBarrier( return success; } -template<bool kIsStatic, typename Visitor> +template<bool kIsStatic, + VerifyObjectFlags kVerifyFlags, + ReadBarrierOption kReadBarrierOption, + typename Visitor> inline void Object::VisitFieldsReferences(uint32_t ref_offsets, const Visitor& visitor) { if (!kIsStatic && (ref_offsets != mirror::Class::kClassWalkSuper)) { // Instance fields and not the slow-path. @@ -1047,9 +1050,12 @@ inline void Object::VisitFieldsReferences(uint32_t ref_offsets, const Visitor& v // There is no reference offset bitmap. In the non-static case, walk up the class // inheritance hierarchy and find reference offsets the hard way. In the static case, just // consider this class. - for (mirror::Class* klass = kIsStatic ? AsClass() : GetClass(); klass != nullptr; - klass = kIsStatic ? nullptr : klass->GetSuperClass()) { - size_t num_reference_fields = + for (mirror::Class* klass = kIsStatic + ? 
AsClass<kVerifyFlags, kReadBarrierOption>() + : GetClass<kVerifyFlags, kReadBarrierOption>(); + klass != nullptr; + klass = kIsStatic ? nullptr : klass->GetSuperClass<kVerifyFlags, kReadBarrierOption>()) { + const size_t num_reference_fields = kIsStatic ? klass->NumReferenceStaticFields() : klass->NumReferenceInstanceFields(); if (num_reference_fields == 0u) { continue; @@ -1072,49 +1078,54 @@ inline void Object::VisitFieldsReferences(uint32_t ref_offsets, const Visitor& v } } -template<typename Visitor> +template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption, typename Visitor> inline void Object::VisitInstanceFieldsReferences(mirror::Class* klass, const Visitor& visitor) { - VisitFieldsReferences<false>(klass->GetReferenceInstanceOffsets<kVerifyNone>(), visitor); + VisitFieldsReferences<false, kVerifyFlags, kReadBarrierOption>( + klass->GetReferenceInstanceOffsets<kVerifyFlags>(), visitor); } -template<typename Visitor> +template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption, typename Visitor> inline void Object::VisitStaticFieldsReferences(mirror::Class* klass, const Visitor& visitor) { DCHECK(!klass->IsTemp()); - klass->VisitFieldsReferences<true>(0, visitor); + klass->VisitFieldsReferences<true, kVerifyFlags, kReadBarrierOption>(0, visitor); } -template<VerifyObjectFlags kVerifyFlags> +template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption> inline bool Object::IsClassLoader() { - return GetClass<kVerifyFlags>()->IsClassLoaderClass(); + return GetClass<kVerifyFlags, kReadBarrierOption>()->IsClassLoaderClass(); } -template<VerifyObjectFlags kVerifyFlags> +template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption> inline mirror::ClassLoader* Object::AsClassLoader() { - DCHECK(IsClassLoader<kVerifyFlags>()); + DCHECK((IsClassLoader<kVerifyFlags, kReadBarrierOption>())); return down_cast<mirror::ClassLoader*>(this); } -template<VerifyObjectFlags kVerifyFlags> +template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption> inline bool Object::IsDexCache() { - return GetClass<kVerifyFlags>()->IsDexCacheClass(); + return GetClass<kVerifyFlags, kReadBarrierOption>()->IsDexCacheClass(); } -template<VerifyObjectFlags kVerifyFlags> +template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption> inline mirror::DexCache* Object::AsDexCache() { - DCHECK(IsDexCache<kVerifyFlags>()); + DCHECK((IsDexCache<kVerifyFlags, kReadBarrierOption>())); return down_cast<mirror::DexCache*>(this); } -template <VerifyObjectFlags kVerifyFlags, typename Visitor, typename JavaLangRefVisitor> +template <bool kVisitNativeRoots, + VerifyObjectFlags kVerifyFlags, + ReadBarrierOption kReadBarrierOption, + typename Visitor, + typename JavaLangRefVisitor> inline void Object::VisitReferences(const Visitor& visitor, const JavaLangRefVisitor& ref_visitor) { - mirror::Class* klass = GetClass<kVerifyFlags>(); + mirror::Class* klass = GetClass<kVerifyFlags, kReadBarrierOption>(); visitor(this, ClassOffset(), false); const uint32_t class_flags = klass->GetClassFlags<kVerifyNone>(); if (LIKELY(class_flags == kClassFlagNormal)) { DCHECK(!klass->IsVariableSize()); VisitInstanceFieldsReferences(klass, visitor); - DCHECK(!klass->IsClassClass()); + DCHECK((!klass->IsClassClass<kVerifyFlags, kReadBarrierOption>())); DCHECK(!klass->IsStringClass()); DCHECK(!klass->IsClassLoaderClass()); DCHECK(!klass->IsArrayClass()); @@ -1123,23 +1134,29 @@ inline void Object::VisitReferences(const Visitor& visitor, 
DCHECK(!klass->IsStringClass()); if (class_flags == kClassFlagClass) { DCHECK(klass->IsClassClass()); - AsClass<kVerifyNone>()->VisitReferences(klass, visitor); + AsClass<kVerifyNone>()->VisitReferences<kVisitNativeRoots, + kVerifyFlags, + kReadBarrierOption>(klass, visitor); } else if (class_flags == kClassFlagObjectArray) { - DCHECK(klass->IsObjectArrayClass()); + DCHECK((klass->IsObjectArrayClass<kVerifyFlags, kReadBarrierOption>())); AsObjectArray<mirror::Object, kVerifyNone>()->VisitReferences(visitor); } else if ((class_flags & kClassFlagReference) != 0) { VisitInstanceFieldsReferences(klass, visitor); ref_visitor(klass, AsReference()); } else if (class_flags == kClassFlagDexCache) { - mirror::DexCache* const dex_cache = AsDexCache<kVerifyFlags>(); - dex_cache->VisitReferences<kVerifyFlags>(klass, visitor); + mirror::DexCache* const dex_cache = AsDexCache<kVerifyFlags, kReadBarrierOption>(); + dex_cache->VisitReferences<kVisitNativeRoots, + kVerifyFlags, + kReadBarrierOption>(klass, visitor); } else { - mirror::ClassLoader* const class_loader = AsClassLoader<kVerifyFlags>(); - class_loader->VisitReferences<kVerifyFlags>(klass, visitor); + mirror::ClassLoader* const class_loader = AsClassLoader<kVerifyFlags, kReadBarrierOption>(); + class_loader->VisitReferences<kVisitNativeRoots, + kVerifyFlags, + kReadBarrierOption>(klass, visitor); } } else if (kIsDebugBuild) { - CHECK(!klass->IsClassClass()); - CHECK(!klass->IsObjectArrayClass()); + CHECK((!klass->IsClassClass<kVerifyFlags, kReadBarrierOption>())); + CHECK((!klass->IsObjectArrayClass<kVerifyFlags, kReadBarrierOption>())); // String still has instance fields for reflection purposes but these don't exist in // actual string instances. if (!klass->IsStringClass()) { @@ -1147,7 +1164,7 @@ inline void Object::VisitReferences(const Visitor& visitor, mirror::Class* super_class = klass; do { total_reference_instance_fields += super_class->NumReferenceInstanceFields(); - super_class = super_class->GetSuperClass(); + super_class = super_class->GetSuperClass<kVerifyFlags, kReadBarrierOption>(); } while (super_class != nullptr); // The only reference field should be the object's class. This field is handled at the // beginning of the function. 
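The Object::VisitReferences hunk above keeps its single class-flags dispatch but now forwards kVisitNativeRoots and kReadBarrierOption into each specialized path (Class, ObjectArray, DexCache, ClassLoader). A rough standalone sketch of that dispatch shape; the flag names mirror ART's but the values and the returned strings are illustrative only:

#include <cstdint>
#include <iostream>

// Illustrative flag constants; real kClassFlag* values differ.
constexpr uint32_t kClassFlagNormal      = 0x00u;
constexpr uint32_t kClassFlagClass       = 0x01u;
constexpr uint32_t kClassFlagObjectArray = 0x02u;
constexpr uint32_t kClassFlagReference   = 0x04u;
constexpr uint32_t kClassFlagDexCache    = 0x08u;

// One flag load, then a branch per specialized visit path, with the common
// "plain instance fields" case tested first -- the shape of the hunk above.
const char* DispatchVisit(uint32_t class_flags) {
  if (class_flags == kClassFlagNormal) {
    return "instance fields only";
  }
  if (class_flags == kClassFlagClass) {
    return "Class path: static fields, native roots if kVisitNativeRoots";
  }
  if (class_flags == kClassFlagObjectArray) {
    return "ObjectArray path: visit elements";
  }
  if ((class_flags & kClassFlagReference) != 0) {
    return "Reference path: instance fields plus referent";
  }
  if (class_flags == kClassFlagDexCache) {
    return "DexCache path: GcRoot arrays only if kVisitNativeRoots";
  }
  return "ClassLoader path: class table only if kVisitClasses";
}

int main() {
  std::cout << DispatchVisit(kClassFlagDexCache) << "\n";
}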
diff --git a/runtime/mirror/object.h b/runtime/mirror/object.h index 71e704e704..d635002d12 100644 --- a/runtime/mirror/object.h +++ b/runtime/mirror/object.h @@ -164,14 +164,18 @@ class MANAGED LOCKABLE Object { template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags> ObjectArray<T>* AsObjectArray() SHARED_REQUIRES(Locks::mutator_lock_); - template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags> + template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, + ReadBarrierOption kReadBarrierOption = kWithReadBarrier> bool IsClassLoader() SHARED_REQUIRES(Locks::mutator_lock_); - template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags> + template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, + ReadBarrierOption kReadBarrierOption = kWithReadBarrier> ClassLoader* AsClassLoader() SHARED_REQUIRES(Locks::mutator_lock_); - template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags> + template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, + ReadBarrierOption kReadBarrierOption = kWithReadBarrier> bool IsDexCache() SHARED_REQUIRES(Locks::mutator_lock_); - template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags> + template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, + ReadBarrierOption kReadBarrierOption = kWithReadBarrier> DexCache* AsDexCache() SHARED_REQUIRES(Locks::mutator_lock_); template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, @@ -456,6 +460,13 @@ class MANAGED LOCKABLE Object { SetFieldPtrWithSize<kTransactionActive, kCheckTransaction, kVerifyFlags>( field_offset, new_value, sizeof(void*)); } + template<bool kTransactionActive, bool kCheckTransaction = true, + VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, typename T> + void SetFieldPtr64(MemberOffset field_offset, T new_value) + SHARED_REQUIRES(Locks::mutator_lock_) { + SetFieldPtrWithSize<kTransactionActive, kCheckTransaction, kVerifyFlags>( + field_offset, new_value, 8u); + } template<bool kTransactionActive, bool kCheckTransaction = true, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, typename T> @@ -475,7 +486,9 @@ class MANAGED LOCKABLE Object { } // TODO fix thread safety analysis broken by the use of template. This should be // SHARED_REQUIRES(Locks::mutator_lock_). - template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, + template <bool kVisitNativeRoots = true, + VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, + ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor, typename JavaLangRefVisitor = VoidFunctor> void VisitReferences(const Visitor& visitor, const JavaLangRefVisitor& ref_visitor) @@ -495,6 +508,11 @@ class MANAGED LOCKABLE Object { SHARED_REQUIRES(Locks::mutator_lock_) { return GetFieldPtrWithSize<T, kVerifyFlags, kIsVolatile>(field_offset, sizeof(void*)); } + template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false> + T GetFieldPtr64(MemberOffset field_offset) + SHARED_REQUIRES(Locks::mutator_lock_) { + return GetFieldPtrWithSize<T, kVerifyFlags, kIsVolatile>(field_offset, 8u); + } template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false> ALWAYS_INLINE T GetFieldPtrWithSize(MemberOffset field_offset, size_t pointer_size) @@ -511,13 +529,20 @@ class MANAGED LOCKABLE Object { } // TODO: Fixme when anotatalysis works with visitors. 
- template<bool kIsStatic, typename Visitor> + template<bool kIsStatic, + VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, + ReadBarrierOption kReadBarrierOption = kWithReadBarrier, + typename Visitor> void VisitFieldsReferences(uint32_t ref_offsets, const Visitor& visitor) HOT_ATTR NO_THREAD_SAFETY_ANALYSIS; - template<typename Visitor> + template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, + ReadBarrierOption kReadBarrierOption = kWithReadBarrier, + typename Visitor> void VisitInstanceFieldsReferences(mirror::Class* klass, const Visitor& visitor) HOT_ATTR SHARED_REQUIRES(Locks::mutator_lock_); - template<typename Visitor> + template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, + ReadBarrierOption kReadBarrierOption = kWithReadBarrier, + typename Visitor> void VisitStaticFieldsReferences(mirror::Class* klass, const Visitor& visitor) HOT_ATTR SHARED_REQUIRES(Locks::mutator_lock_); diff --git a/runtime/mirror/object_array-inl.h b/runtime/mirror/object_array-inl.h index 5337760fb8..6f9d64297a 100644 --- a/runtime/mirror/object_array-inl.h +++ b/runtime/mirror/object_array-inl.h @@ -55,13 +55,13 @@ inline ObjectArray<T>* ObjectArray<T>::Alloc(Thread* self, Class* object_array_c Runtime::Current()->GetHeap()->GetCurrentAllocator()); } -template<class T> +template<class T> template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption> inline T* ObjectArray<T>::Get(int32_t i) { if (!CheckIsValidIndex(i)) { DCHECK(Thread::Current()->IsExceptionPending()); return nullptr; } - return GetFieldObject<T>(OffsetOfElement(i)); + return GetFieldObject<T, kVerifyFlags, kReadBarrierOption>(OffsetOfElement(i)); } template<class T> template<VerifyObjectFlags kVerifyFlags> diff --git a/runtime/mirror/object_array.h b/runtime/mirror/object_array.h index b45cafd2a3..1b1295cedb 100644 --- a/runtime/mirror/object_array.h +++ b/runtime/mirror/object_array.h @@ -37,7 +37,9 @@ class MANAGED ObjectArray: public Array { static ObjectArray<T>* Alloc(Thread* self, Class* object_array_class, int32_t length) SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!Roles::uninterruptible_); - T* Get(int32_t i) ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_); + template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, + ReadBarrierOption kReadBarrierOption = kWithReadBarrier> + ALWAYS_INLINE T* Get(int32_t i) SHARED_REQUIRES(Locks::mutator_lock_); // Returns true if the object can be stored into the array. If not, throws // an ArrayStoreException and returns false. diff --git a/runtime/native/dalvik_system_DexFile.cc b/runtime/native/dalvik_system_DexFile.cc index da6cf1f198..6643ac2231 100644 --- a/runtime/native/dalvik_system_DexFile.cc +++ b/runtime/native/dalvik_system_DexFile.cc @@ -154,10 +154,8 @@ static jobject DexFile_openDexFileNative(JNIEnv* env, jstring javaSourceName, jstring javaOutputName, jint flags ATTRIBUTE_UNUSED, - // class_loader will be used for app images. - jobject class_loader ATTRIBUTE_UNUSED, - // dex_elements will be used for app images. 
- jobject dex_elements ATTRIBUTE_UNUSED) { + jobject class_loader, + jobjectArray dex_elements) { ScopedUtfChars sourceName(env, javaSourceName); if (sourceName.c_str() == nullptr) { return 0; @@ -174,6 +172,8 @@ static jobject DexFile_openDexFileNative(JNIEnv* env, dex_files = runtime->GetOatFileManager().OpenDexFilesFromOat(sourceName.c_str(), outputName.c_str(), + class_loader, + dex_elements, /*out*/ &oat_file, /*out*/ &error_msgs); diff --git a/runtime/oat_file_assistant.cc b/runtime/oat_file_assistant.cc index d6b08684b9..2bd5c76c00 100644 --- a/runtime/oat_file_assistant.cc +++ b/runtime/oat_file_assistant.cc @@ -38,6 +38,7 @@ #include "os.h" #include "profiler.h" #include "runtime.h" +#include "scoped_thread_state_change.h" #include "ScopedFd.h" #include "utils.h" @@ -326,6 +327,17 @@ bool OatFileAssistant::OdexFileIsUpToDate() { return cached_odex_file_is_up_to_date_; } +std::string OatFileAssistant::ArtFileName(const OatFile* oat_file) const { + const std::string oat_file_location = oat_file->GetLocation(); + // Replace extension with .art + const size_t last_ext = oat_file_location.find_last_of('.'); + if (last_ext == std::string::npos) { + LOG(ERROR) << "No extension in oat file " << oat_file_location; + return std::string(); + } + return oat_file_location.substr(0, last_ext) + ".art"; +} + const std::string* OatFileAssistant::OatFileName() { if (!cached_oat_file_name_attempted_) { cached_oat_file_name_attempted_ = true; @@ -1003,5 +1015,22 @@ ProfileFile* OatFileAssistant::GetOldProfile() { return old_profile_load_succeeded_ ? &cached_old_profile_ : nullptr; } +gc::space::ImageSpace* OatFileAssistant::OpenImageSpace(const OatFile* oat_file) { + DCHECK(oat_file != nullptr); + std::string art_file = ArtFileName(oat_file); + if (art_file.empty()) { + return nullptr; + } + std::string error_msg; + ScopedObjectAccess soa(Thread::Current()); + gc::space::ImageSpace* ret = gc::space::ImageSpace::CreateFromAppImage(art_file.c_str(), + oat_file, + &error_msg); + if (ret == nullptr) { + LOG(INFO) << "Failed to open app image " << art_file.c_str() << " " << error_msg; + } + return ret; +} + } // namespace art diff --git a/runtime/oat_file_assistant.h b/runtime/oat_file_assistant.h index f781532b14..7b45bca946 100644 --- a/runtime/oat_file_assistant.h +++ b/runtime/oat_file_assistant.h @@ -30,6 +30,12 @@ namespace art { +namespace gc { +namespace space { +class ImageSpace; +} // namespace space +} // namespace gc + // Class for assisting with oat file management. // // This class collects common utilities for determining the status of an oat @@ -163,6 +169,9 @@ class OatFileAssistant { // the OatFileAssistant object. std::unique_ptr<OatFile> GetBestOatFile(); + // Open and returns an image space associated with the oat file. + gc::space::ImageSpace* OpenImageSpace(const OatFile* oat_file); + // Loads the dex files in the given oat file for the given dex location. // The oat file should be up to date for the given dex location. // This loads multiple dex files in the case of multidex. @@ -214,6 +223,9 @@ class OatFileAssistant { bool OatFileNeedsRelocation(); bool OatFileIsUpToDate(); + // Return image file name. Does not cache since it relies on the oat file. + std::string ArtFileName(const OatFile* oat_file) const; + // These methods return the status for a given opened oat file with respect // to the dex location. 
OatStatus GivenOatFileStatus(const OatFile& file); diff --git a/runtime/oat_file_assistant_test.cc b/runtime/oat_file_assistant_test.cc index f994f0c99d..25dcbe4c63 100644 --- a/runtime/oat_file_assistant_test.cc +++ b/runtime/oat_file_assistant_test.cc @@ -996,6 +996,8 @@ class RaceGenerateTask : public Task { dex_files = Runtime::Current()->GetOatFileManager().OpenDexFilesFromOat( dex_location_.c_str(), oat_location_.c_str(), + /*class_loader*/nullptr, + /*dex_elements*/nullptr, &oat_file, &error_msgs); CHECK(!dex_files.empty()) << Join(error_msgs, '\n'); diff --git a/runtime/oat_file_manager.cc b/runtime/oat_file_manager.cc index 7f216f9418..b34b5505eb 100644 --- a/runtime/oat_file_manager.cc +++ b/runtime/oat_file_manager.cc @@ -22,9 +22,13 @@ #include "base/logging.h" #include "base/stl_util.h" +#include "class_linker.h" #include "dex_file-inl.h" #include "gc/space/image_space.h" +#include "handle_scope-inl.h" +#include "mirror/class_loader.h" #include "oat_file_assistant.h" +#include "scoped_thread_state_change.h" #include "thread-inl.h" namespace art { @@ -34,6 +38,9 @@ namespace art { // normal builds. static constexpr bool kDuplicateClassesCheck = kIsDebugBuild; +// If true, then we attempt to load the application image if it exists. +static constexpr bool kEnableAppImage = true; + const OatFile* OatFileManager::RegisterOatFile(std::unique_ptr<const OatFile> oat_file) { WriterMutexLock mu(Thread::Current(), *Locks::oat_file_manager_lock_); DCHECK(oat_file != nullptr); @@ -284,6 +291,8 @@ bool OatFileManager::HasCollisions(const OatFile* oat_file, std::vector<std::unique_ptr<const DexFile>> OatFileManager::OpenDexFilesFromOat( const char* dex_location, const char* oat_location, + jobject class_loader, + jobjectArray dex_elements, const OatFile** out_oat_file, std::vector<std::string>* error_msgs) { CHECK(dex_location != nullptr); @@ -291,12 +300,13 @@ std::vector<std::unique_ptr<const DexFile>> OatFileManager::OpenDexFilesFromOat( // Verify we aren't holding the mutator lock, which could starve GC if we // have to generate or relocate an oat file. - Locks::mutator_lock_->AssertNotHeld(Thread::Current()); - + Thread* const self = Thread::Current(); + Locks::mutator_lock_->AssertNotHeld(self); + Runtime* const runtime = Runtime::Current(); OatFileAssistant oat_file_assistant(dex_location, oat_location, kRuntimeISA, - !Runtime::Current()->IsAotCompiler()); + !runtime->IsAotCompiler()); // Lock the target oat location to avoid races generating and loading the // oat file. @@ -317,6 +327,7 @@ std::vector<std::unique_ptr<const DexFile>> OatFileManager::OpenDexFilesFromOat( // Get the oat file on disk. std::unique_ptr<const OatFile> oat_file(oat_file_assistant.GetBestOatFile().release()); + if (oat_file != nullptr) { // Take the file only if it has no collisions, or we must take it because of preopting. bool accept_oat_file = !HasCollisions(oat_file.get(), /*out*/ &error_msg); @@ -351,7 +362,42 @@ std::vector<std::unique_ptr<const DexFile>> OatFileManager::OpenDexFilesFromOat( // Load the dex files from the oat file. if (source_oat_file != nullptr) { - dex_files = oat_file_assistant.LoadDexFiles(*source_oat_file, dex_location); + bool added_image_space = false; + if (source_oat_file->IsExecutable()) { + std::unique_ptr<gc::space::ImageSpace> image_space( + kEnableAppImage ? 
oat_file_assistant.OpenImageSpace(source_oat_file) : nullptr); + if (image_space != nullptr) { + ScopedObjectAccess soa(self); + StackHandleScope<1> hs(self); + Handle<mirror::ClassLoader> h_loader( + hs.NewHandle(soa.Decode<mirror::ClassLoader*>(class_loader))); + // Can not load app image without class loader. + if (h_loader.Get() != nullptr) { + std::string temp_error_msg; + // Add image space has a race condition since other threads could be reading from the + // spaces array. + runtime->GetHeap()->AddSpace(image_space.get()); + added_image_space = true; + if (!runtime->GetClassLinker()->AddImageSpace(image_space.get(), + h_loader, + dex_elements, + dex_location, + /*out*/&dex_files, + /*out*/&temp_error_msg)) { + LOG(INFO) << "Failed to add image file " << temp_error_msg; + dex_files.clear(); + runtime->GetHeap()->RemoveSpace(image_space.get()); + added_image_space = false; + // Non-fatal, don't update error_msg. + } + image_space.release(); + } + } + } + if (!added_image_space) { + DCHECK(dex_files.empty()); + dex_files = oat_file_assistant.LoadDexFiles(*source_oat_file, dex_location); + } if (dex_files.empty()) { error_msgs->push_back("Failed to open dex files from " + source_oat_file->GetLocation()); } diff --git a/runtime/oat_file_manager.h b/runtime/oat_file_manager.h index 4690e4550b..c508c4bf07 100644 --- a/runtime/oat_file_manager.h +++ b/runtime/oat_file_manager.h @@ -25,6 +25,7 @@ #include "base/macros.h" #include "base/mutex.h" +#include "jni.h" namespace art { @@ -101,6 +102,8 @@ class OatFileManager { std::vector<std::unique_ptr<const DexFile>> OpenDexFilesFromOat( const char* dex_location, const char* oat_location, + jobject class_loader, + jobjectArray dex_elements, /*out*/ const OatFile** out_oat_file, /*out*/ std::vector<std::string>* error_msgs) REQUIRES(!Locks::oat_file_manager_lock_, !Locks::mutator_lock_); diff --git a/runtime/runtime.cc b/runtime/runtime.cc index e30c26dd5e..f138c811d7 100644 --- a/runtime/runtime.cc +++ b/runtime/runtime.cc @@ -1097,7 +1097,7 @@ bool Runtime::Init(RuntimeArgumentMap&& runtime_options_in) { if (GetHeap()->HasBootImageSpace()) { ATRACE_BEGIN("InitFromImage"); std::string error_msg; - bool result = class_linker_->InitFromImage(&error_msg); + bool result = class_linker_->InitFromBootImage(&error_msg); ATRACE_END(); if (!result) { LOG(ERROR) << "Could not initialize from image: " << error_msg; diff --git a/runtime/runtime.h b/runtime/runtime.h index 20acffb697..c8c2ee530d 100644 --- a/runtime/runtime.h +++ b/runtime/runtime.h @@ -366,7 +366,7 @@ class Runtime { SHARED_REQUIRES(Locks::mutator_lock_); // Returns a special method that calls into a trampoline for runtime method resolution - ArtMethod* GetResolutionMethod() SHARED_REQUIRES(Locks::mutator_lock_); + ArtMethod* GetResolutionMethod(); bool HasResolutionMethod() const { return resolution_method_ != nullptr; @@ -377,8 +377,8 @@ class Runtime { ArtMethod* CreateResolutionMethod() SHARED_REQUIRES(Locks::mutator_lock_); // Returns a special method that calls into a trampoline for runtime imt conflicts. 
- ArtMethod* GetImtConflictMethod() SHARED_REQUIRES(Locks::mutator_lock_); - ArtMethod* GetImtUnimplementedMethod() SHARED_REQUIRES(Locks::mutator_lock_); + ArtMethod* GetImtConflictMethod(); + ArtMethod* GetImtUnimplementedMethod(); bool HasImtConflictMethod() const { return imt_conflict_method_ != nullptr; diff --git a/runtime/thread_list.cc b/runtime/thread_list.cc index 727ffe5f33..c8714a60a8 100644 --- a/runtime/thread_list.cc +++ b/runtime/thread_list.cc @@ -1191,6 +1191,7 @@ void ThreadList::SuspendAllDaemonThreadsForShutdown() { } LOG(WARNING) << "timed out suspending all daemon threads"; } + void ThreadList::Register(Thread* self) { DCHECK_EQ(self, Thread::Current()); diff --git a/runtime/utils/dex_cache_arrays_layout-inl.h b/runtime/utils/dex_cache_arrays_layout-inl.h index 90e24b9632..f6ee6a2b1a 100644 --- a/runtime/utils/dex_cache_arrays_layout-inl.h +++ b/runtime/utils/dex_cache_arrays_layout-inl.h @@ -60,7 +60,9 @@ inline size_t DexCacheArraysLayout::TypeOffset(uint32_t type_idx) const { } inline size_t DexCacheArraysLayout::TypesSize(size_t num_elements) const { - return ArraySize(sizeof(GcRoot<mirror::Class>), num_elements); + // App image patching relies on having enough room for a forwarding pointer in the types array. + // See FixupArtMethodArrayVisitor and ClassLinker::AddImageSpace. + return std::max(ArraySize(sizeof(GcRoot<mirror::Class>), num_elements), pointer_size_); } inline size_t DexCacheArraysLayout::TypesAlignment() const { @@ -72,7 +74,8 @@ inline size_t DexCacheArraysLayout::MethodOffset(uint32_t method_idx) const { } inline size_t DexCacheArraysLayout::MethodsSize(size_t num_elements) const { - return ArraySize(pointer_size_, num_elements); + // App image patching relies on having enough room for a forwarding pointer in the methods array. 
+ return std::max(ArraySize(pointer_size_, num_elements), pointer_size_); } inline size_t DexCacheArraysLayout::MethodsAlignment() const { diff --git a/runtime/well_known_classes.cc b/runtime/well_known_classes.cc index 83009212e8..cfa8329a36 100644 --- a/runtime/well_known_classes.cc +++ b/runtime/well_known_classes.cc @@ -129,6 +129,7 @@ jmethodID WellKnownClasses::org_apache_harmony_dalvik_ddmc_DdmServer_broadcast; jmethodID WellKnownClasses::org_apache_harmony_dalvik_ddmc_DdmServer_dispatch; jfieldID WellKnownClasses::dalvik_system_DexFile_cookie; +jfieldID WellKnownClasses::dalvik_system_DexFile_fileName; jfieldID WellKnownClasses::dalvik_system_PathClassLoader_pathList; jfieldID WellKnownClasses::dalvik_system_DexPathList_dexElements; jfieldID WellKnownClasses::dalvik_system_DexPathList__Element_dexFile; @@ -333,6 +334,7 @@ void WellKnownClasses::Init(JNIEnv* env) { true, "newStringFromStringBuilder", "(Ljava/lang/StringBuilder;)Ljava/lang/String;"); dalvik_system_DexFile_cookie = CacheField(env, dalvik_system_DexFile, false, "mCookie", "Ljava/lang/Object;"); + dalvik_system_DexFile_fileName = CacheField(env, dalvik_system_DexFile, false, "mFileName", "Ljava/lang/String;"); dalvik_system_PathClassLoader_pathList = CacheField(env, dalvik_system_PathClassLoader, false, "pathList", "Ldalvik/system/DexPathList;"); dalvik_system_DexPathList_dexElements = CacheField(env, dalvik_system_DexPathList, false, "dexElements", "[Ldalvik/system/DexPathList$Element;"); dalvik_system_DexPathList__Element_dexFile = CacheField(env, dalvik_system_DexPathList__Element, false, "dexFile", "Ldalvik/system/DexFile;"); diff --git a/runtime/well_known_classes.h b/runtime/well_known_classes.h index 55158a77c6..482ff0a8f4 100644 --- a/runtime/well_known_classes.h +++ b/runtime/well_known_classes.h @@ -140,6 +140,7 @@ struct WellKnownClasses { static jmethodID org_apache_harmony_dalvik_ddmc_DdmServer_dispatch; static jfieldID dalvik_system_DexFile_cookie; + static jfieldID dalvik_system_DexFile_fileName; static jfieldID dalvik_system_DexPathList_dexElements; static jfieldID dalvik_system_DexPathList__Element_dexFile; static jfieldID dalvik_system_PathClassLoader_pathList; diff --git a/test/496-checker-inlining-and-class-loader/src/Main.java b/test/496-checker-inlining-and-class-loader/src/Main.java index 39c031a6bc..ea6df623a1 100644 --- a/test/496-checker-inlining-and-class-loader/src/Main.java +++ b/test/496-checker-inlining-and-class-loader/src/Main.java @@ -16,6 +16,7 @@ import java.lang.reflect.Field; import java.lang.reflect.Method; +import java.util.ArrayList; import java.util.List; class MyClassLoader extends ClassLoader { @@ -30,18 +31,31 @@ class MyClassLoader extends ClassLoader { Object pathList = f.get(loader); // Some magic to get access to the dexField field of pathList. + // Need to make a copy of the dex elements since we don't want an app image with pre-resolved + // things. f = pathList.getClass().getDeclaredField("dexElements"); f.setAccessible(true); - dexElements = (Object[]) f.get(pathList); - dexFileField = dexElements[0].getClass().getDeclaredField("dexFile"); - dexFileField.setAccessible(true); + Object[] dexElements = (Object[]) f.get(pathList); + f = dexElements[0].getClass().getDeclaredField("dexFile"); + f.setAccessible(true); + for (Object element : dexElements) { + Object dexFile = f.get(element); + // Make copy. 
+ Field fileNameField = dexFile.getClass().getDeclaredField("mFileName"); + fileNameField.setAccessible(true); + dexFiles.add(dexFile.getClass().getDeclaredConstructor(String.class).newInstance( + fileNameField.get(dexFile))); + } } - Object[] dexElements; + ArrayList<Object> dexFiles = new ArrayList<Object>(); Field dexFileField; protected Class<?> loadClass(String className, boolean resolve) throws ClassNotFoundException { - System.out.println("Request for " + className); + // Other classes may also get loaded, ignore those. + if (className.equals("LoadedByMyClassLoader") || className.equals("FirstSeenByMyClassLoader")) { + System.out.println("Request for " + className); + } // We're only going to handle LoadedByMyClassLoader. if (className != "LoadedByMyClassLoader") { @@ -50,13 +64,12 @@ class MyClassLoader extends ClassLoader { // Mimic what DexPathList.findClass is doing. try { - for (Object element : dexElements) { - Object dex = dexFileField.get(element); - Method method = dex.getClass().getDeclaredMethod( + for (Object dexFile : dexFiles) { + Method method = dexFile.getClass().getDeclaredMethod( "loadClassBinaryName", String.class, ClassLoader.class, List.class); - if (dex != null) { - Class clazz = (Class)method.invoke(dex, className, this, null); + if (dexFile != null) { + Class clazz = (Class)method.invoke(dexFile, className, this, null); if (clazz != null) { return clazz; } diff --git a/test/506-verify-aput/src/Main.java b/test/506-verify-aput/src/Main.java index 8359f2c452..08368d4fa3 100644 --- a/test/506-verify-aput/src/Main.java +++ b/test/506-verify-aput/src/Main.java @@ -23,11 +23,12 @@ public class Main { try { Class.forName("VerifyAPut1"); throw new Error("expected verification error"); - } catch (VerifyError e) { /* ignore */ } - + } catch (VerifyError e) { /* ignore */ + } catch (Error e) { System.out.println(e.getClass() + " " + e.getClass().getClassLoader()); } try { Class.forName("VerifyAPut2"); throw new Error("expected verification error"); - } catch (VerifyError e) { /* ignore */ } + } catch (VerifyError e) { /* ignore */ + } catch (Error e) { System.out.println(e.getClass() + " " + e.getClass().getClassLoader()); } } } |
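Two small pieces of the app-image path added above are easy to restate in isolation: OatFileAssistant::ArtFileName derives the .art location by swapping the oat file's extension, and ImageHeader::IsAppImage treats a recorded non-zero boot image size as the app-image marker. A self-contained sketch of both, with a simplified struct standing in for the real ImageHeader and a hypothetical path used only for illustration:

#include <cstdint>
#include <iostream>
#include <string>

// Mirrors OatFileAssistant::ArtFileName from the hunk above: swap the oat
// location's extension for ".art"; no extension yields an empty string.
std::string ArtFileNameForOat(const std::string& oat_location) {
  const auto last_ext = oat_location.find_last_of('.');
  if (last_ext == std::string::npos) {
    return std::string();
  }
  return oat_location.substr(0, last_ext) + ".art";
}

// Mirrors ImageHeader::IsAppImage(): only app images record the boot image
// extents, so a non-zero boot_image_size_ marks an app image header.
struct ImageHeaderSketch {
  uint32_t boot_image_size_ = 0u;
  bool IsAppImage() const { return boot_image_size_ != 0u; }
};

int main() {
  // Hypothetical oat location, for illustration only.
  std::cout << ArtFileNameForOat("/data/dalvik-cache/arm64/app.odex") << "\n";

  ImageHeaderSketch boot_header;  // boot image header: field stays 0
  ImageHeaderSketch app_header;
  app_header.boot_image_size_ = 0x02000000u;
  std::cout << boot_header.IsAppImage() << " " << app_header.IsAppImage() << "\n";
}

In the diff itself, OpenImageSpace feeds the derived .art path to ImageSpace::CreateFromAppImage, and OpenDexFilesFromOat falls back to oat_file_assistant.LoadDexFiles whenever the image space cannot be opened or added.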