Revert "Revert "Load app images""
This reverts commit 1bc977cf2f8199311a97f2ba9431a184540e3e9c.
Bug: 22858531
Change-Id: Ide00bf3a73a02cba3bb364177204ad1b13f70295
diff --git a/runtime/art_field-inl.h b/runtime/art_field-inl.h
index 4166e22..3463b0d 100644
--- a/runtime/art_field-inl.h
+++ b/runtime/art_field-inl.h
@@ -37,7 +37,7 @@
GcRootSource gc_root_source(this);
mirror::Class* result = declaring_class_.Read(&gc_root_source);
DCHECK(result != nullptr);
- DCHECK(result->IsLoaded() || result->IsErroneous());
+ DCHECK(result->IsLoaded() || result->IsErroneous()) << result->GetStatus();
return result;
}
@@ -334,6 +334,15 @@
visitor.VisitRoot(declaring_class_.AddressWithoutBarrier());
}
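+// The visitor maps the old declaring class pointer to its relocated counterpart; the field is
+// only written back (dirtying the page) when the visitor returns a different pointer.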
+template <typename Visitor>
+inline void ArtField::UpdateObjects(const Visitor& visitor) {
+ mirror::Class* old_class = DeclaringClassRoot().Read<kWithoutReadBarrier>();
+ mirror::Class* new_class = visitor(old_class);
+ if (old_class != new_class) {
+ SetDeclaringClass(new_class);
+ }
+}
+
} // namespace art
#endif // ART_RUNTIME_ART_FIELD_INL_H_
diff --git a/runtime/art_field.h b/runtime/art_field.h
index a943a34..ee1ba1f 100644
--- a/runtime/art_field.h
+++ b/runtime/art_field.h
@@ -190,6 +190,11 @@
return declaring_class_;
}
+ // Update the declaring class with the passed in visitor. Does not use read barrier.
+ template <typename Visitor>
+ ALWAYS_INLINE void UpdateObjects(const Visitor& visitor)
+ SHARED_REQUIRES(Locks::mutator_lock_);
+
private:
mirror::Class* ProxyFindSystemClass(const char* descriptor)
SHARED_REQUIRES(Locks::mutator_lock_);
diff --git a/runtime/art_method-inl.h b/runtime/art_method-inl.h
index a5f5c49..74eb722 100644
--- a/runtime/art_method-inl.h
+++ b/runtime/art_method-inl.h
@@ -467,6 +467,43 @@
}
}
+template <typename Visitor>
+inline void ArtMethod::UpdateObjectsForImageRelocation(const Visitor& visitor) {
+ mirror::Class* old_class = GetDeclaringClassNoBarrier();
+ mirror::Class* new_class = visitor(old_class);
+ if (old_class != new_class) {
+ SetDeclaringClass(new_class);
+ }
+ ArtMethod** old_methods = GetDexCacheResolvedMethods(sizeof(void*));
+ ArtMethod** new_methods = visitor(old_methods);
+ if (old_methods != new_methods) {
+ SetDexCacheResolvedMethods(new_methods, sizeof(void*));
+ }
+ GcRoot<mirror::Class>* old_types = GetDexCacheResolvedTypes(sizeof(void*));
+ GcRoot<mirror::Class>* new_types = visitor(old_types);
+ if (old_types != new_types) {
+ SetDexCacheResolvedTypes(new_types, sizeof(void*));
+ }
+}
+
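+// Usage sketch: the visitor is any callable mapping a pointer to its relocated counterpart and
+// returning the input unchanged when no relocation applies, e.g.
+//   method->UpdateEntrypoints([&](const void* ptr) { return forward_code(ptr); });
+// where forward_code is an illustrative placeholder for a relocation helper. Entry points are
+// only written back when the visitor returns a different pointer, keeping untouched pages clean.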
+template <typename Visitor>
+inline void ArtMethod::UpdateEntrypoints(const Visitor& visitor) {
+ if (IsNative()) {
+ const void* old_native_code = GetEntryPointFromJni();
+ const void* new_native_code = visitor(old_native_code);
+ if (old_native_code != new_native_code) {
+ SetEntryPointFromJni(new_native_code);
+ }
+ } else {
+ DCHECK(GetEntryPointFromJni() == nullptr);
+ }
+ const void* old_code = GetEntryPointFromQuickCompiledCode();
+ const void* new_code = visitor(old_code);
+ if (old_code != new_code) {
+ SetEntryPointFromQuickCompiledCode(new_code);
+ }
+}
+
} // namespace art
#endif // ART_RUNTIME_ART_METHOD_INL_H_
diff --git a/runtime/art_method.h b/runtime/art_method.h
index 0be2fa2..440e796 100644
--- a/runtime/art_method.h
+++ b/runtime/art_method.h
@@ -477,6 +477,17 @@
// Returns whether the method has any compiled code, JIT or AOT.
bool HasAnyCompiledCode() SHARED_REQUIRES(Locks::mutator_lock_);
+
+ // Update heap objects and non-entrypoint pointers by the passed in visitor for image relocation.
+ // Does not use read barrier.
+ template <typename Visitor>
+ ALWAYS_INLINE void UpdateObjectsForImageRelocation(const Visitor& visitor)
+ SHARED_REQUIRES(Locks::mutator_lock_);
+
+ // Update entry points by passing them through the visitor.
+ template <typename Visitor>
+ ALWAYS_INLINE void UpdateEntrypoints(const Visitor& visitor);
+
protected:
// Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
// The class we are a part of.
diff --git a/runtime/base/logging.h b/runtime/base/logging.h
index 115c260..de46b0c 100644
--- a/runtime/base/logging.h
+++ b/runtime/base/logging.h
@@ -53,6 +53,7 @@
bool third_party_jni; // Enabled with "-verbose:third-party-jni".
bool threads;
bool verifier;
+ bool image;
};
// Global log verbosity setting, initialized by InitLogging.
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index ed833c4..ff38394 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -336,6 +336,10 @@
// Use the pointer size from the runtime since we are probably creating the image.
image_pointer_size_ = InstructionSetPointerSize(runtime->GetInstructionSet());
+ if (!ValidPointerSize(image_pointer_size_)) {
+ *error_msg = StringPrintf("Invalid image pointer size: %zu", image_pointer_size_);
+ return false;
+ }
// java_lang_Class comes first, it's needed for AllocClass
// The GC can't handle an object with a null class since we can't get the size of this object.
@@ -489,7 +493,7 @@
return false;
}
AppendToBootClassPath(self, *dex_file);
- opened_dex_files_.push_back(std::move(dex_file));
+ boot_dex_files_.push_back(std::move(dex_file));
}
// now we can use FindSystemClass
@@ -878,6 +882,7 @@
ArtMethod* m;
bool error;
};
+
static void CheckTrampolines(mirror::Object* obj, void* arg) NO_THREAD_SAFETY_ANALYSIS {
if (obj->IsClass()) {
mirror::Class* klass = obj->AsClass();
@@ -896,8 +901,8 @@
}
}
-bool ClassLinker::InitFromImage(std::string* error_msg) {
- VLOG(startup) << "ClassLinker::InitFromImage entering";
+bool ClassLinker::InitFromBootImage(std::string* error_msg) {
+ VLOG(startup) << __FUNCTION__ << " entering";
CHECK(!init_done_);
Runtime* const runtime = Runtime::Current();
@@ -906,6 +911,21 @@
std::vector<gc::space::ImageSpace*> spaces = heap->GetBootImageSpaces();
CHECK(!spaces.empty());
image_pointer_size_ = spaces[0]->GetImageHeader().GetPointerSize();
+ if (!ValidPointerSize(image_pointer_size_)) {
+ *error_msg = StringPrintf("Invalid image pointer size: %zu", image_pointer_size_);
+ return false;
+ }
+ if (!runtime->IsAotCompiler()) {
+ // Only the Aot compiler supports having an image with a different pointer size than the
+ // runtime. This happens on the host for compiling 32 bit tests since we use a 64 bit libart
+ // compiler. We may also use 32 bit dex2oat on a system with 64 bit apps.
+ if (image_pointer_size_ != sizeof(void*)) {
+ *error_msg = StringPrintf("Runtime must use current image pointer size: %zu vs %zu",
+ image_pointer_size_,
+ sizeof(void*));
+ return false;
+ }
+ }
dex_cache_boot_image_class_lookup_required_ = true;
std::vector<const OatFile*> oat_files =
runtime->GetOatFileManager().RegisterImageOatFiles(spaces);
@@ -957,19 +977,10 @@
}
}
- StackHandleScopeCollection handles(self);
- std::vector<Handle<mirror::ObjectArray<mirror::DexCache>>> dex_caches_vector;
- for (gc::space::ImageSpace* space : spaces) {
- Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches(handles.NewHandle(
- space->GetImageHeader().GetImageRoot(ImageHeader::kDexCaches)->
- AsObjectArray<mirror::DexCache>()));
- dex_caches_vector.push_back(dex_caches);
- }
-
- Handle<mirror::ObjectArray<mirror::Class>> class_roots(handles.NewHandle(
- spaces[0]->GetImageHeader().GetImageRoot(ImageHeader::kClassRoots)->
- AsObjectArray<mirror::Class>()));
- class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>(class_roots.Get());
+ class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>(
+ down_cast<mirror::ObjectArray<mirror::Class>*>(
+ spaces[0]->GetImageHeader().GetImageRoot(ImageHeader::kClassRoots)));
+ mirror::Class::SetClassClass(class_roots_.Read()->Get(kJavaLangClass));
// Special case of setting up the String class early so that we can test arbitrary objects
// as being Strings or not
@@ -982,116 +993,6 @@
runtime->SetSentinel(heap->AllocNonMovableObject<true>(
self, java_lang_Object, java_lang_Object->GetObjectSize(), VoidFunctor()));
- uint32_t dex_file_count = 0;
- for (const OatFile* oat_file : oat_files) {
- dex_file_count += oat_file->GetOatHeader().GetDexFileCount();
- }
- uint32_t dex_caches_count = 0;
- for (auto dex_caches : dex_caches_vector) {
- dex_caches_count += dex_caches->GetLength();
- }
- if (dex_file_count != dex_caches_count) {
- *error_msg = "Dex cache count and dex file count mismatch while trying to initialize from "
- "image";
- return false;
- }
- for (auto dex_caches : dex_caches_vector) {
- for (int32_t i = 0; i < dex_caches->GetLength(); i++) {
- StackHandleScope<1> hs2(self);
- Handle<mirror::DexCache> dex_cache(hs2.NewHandle(dex_caches->Get(i)));
- const std::string& dex_file_location(dex_cache->GetLocation()->ToModifiedUtf8());
- const OatFile::OatDexFile* oat_dex_file = nullptr;
- for (const OatFile* oat_file : oat_files) {
- const OatFile::OatDexFile* oat_dex =
- oat_file->GetOatDexFile(dex_file_location.c_str(), nullptr, false);
- if (oat_dex != nullptr) {
- DCHECK(oat_dex_file == nullptr);
- oat_dex_file = oat_dex;
- }
- }
-
- if (oat_dex_file == nullptr) {
- *error_msg = StringPrintf("Failed finding oat dex file for %s",
- dex_file_location.c_str());
- return false;
- }
- std::string inner_error_msg;
- std::unique_ptr<const DexFile> dex_file = oat_dex_file->OpenDexFile(&inner_error_msg);
- if (dex_file == nullptr) {
- *error_msg = StringPrintf("Failed to open dex file %s error '%s'",
- dex_file_location.c_str(),
- inner_error_msg.c_str());
- return false;
- }
-
- if (kSanityCheckObjects) {
- SanityCheckArtMethodPointerArray(dex_cache->GetResolvedMethods(),
- dex_cache->NumResolvedMethods(),
- image_pointer_size_,
- spaces);
- }
-
- if (dex_file->GetLocationChecksum() != oat_dex_file->GetDexFileLocationChecksum()) {
- *error_msg = StringPrintf("Checksums do not match for %s: %x vs %x",
- dex_file_location.c_str(),
- dex_file->GetLocationChecksum(),
- oat_dex_file->GetDexFileLocationChecksum());
- return false;
- }
-
- AppendToBootClassPath(*dex_file.get(), dex_cache);
- opened_dex_files_.push_back(std::move(dex_file));
- }
- }
-
- if (!ValidPointerSize(image_pointer_size_)) {
- *error_msg = StringPrintf("Invalid image pointer size: %zu", image_pointer_size_);
- return false;
- }
-
- // Set classes on AbstractMethod early so that IsMethod tests can be performed during the live
- // bitmap walk.
- if (!runtime->IsAotCompiler()) {
- // Only the Aot compiler supports having an image with a different pointer size than the
- // runtime. This happens on the host for compile 32 bit tests since we use a 64 bit libart
- // compiler. We may also use 32 bit dex2oat on a system with 64 bit apps.
- if (image_pointer_size_ != sizeof(void*)) {
- *error_msg = StringPrintf("Runtime must use current image pointer size: %zu vs %zu",
- image_pointer_size_ ,
- sizeof(void*));
- return false;
- }
- }
-
- if (kSanityCheckObjects) {
- for (auto dex_caches : dex_caches_vector) {
- for (int32_t i = 0; i < dex_caches->GetLength(); i++) {
- auto* dex_cache = dex_caches->Get(i);
- for (size_t j = 0; j < dex_cache->NumResolvedFields(); ++j) {
- auto* field = dex_cache->GetResolvedField(j, image_pointer_size_);
- if (field != nullptr) {
- CHECK(field->GetDeclaringClass()->GetClass() != nullptr);
- }
- }
- }
- }
- heap->VisitObjects(SanityCheckObjectsCallback, nullptr);
- }
-
- // Set entry point to interpreter if in InterpretOnly mode.
- if (!runtime->IsAotCompiler() && runtime->GetInstrumentation()->InterpretOnly()) {
- for (gc::space::ImageSpace* space : spaces) {
- const ImageHeader& header = space->GetImageHeader();
- const ImageSection& methods = header.GetMethodsSection();
- SetInterpreterEntrypointArtMethodVisitor visitor(image_pointer_size_);
- methods.VisitPackedArtMethods(&visitor, space->Begin(), image_pointer_size_);
- }
- }
-
- // reinit class_roots_
- mirror::Class::SetClassClass(class_roots->Get(kJavaLangClass));
- class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>(class_roots.Get());
-
// reinit array_iftable_ from any array class instance, they should be ==
array_iftable_ = GcRoot<mirror::IfTable>(GetClassRoot(kObjectArrayClass)->GetIfTable());
DCHECK_EQ(array_iftable_.Read(), GetClassRoot(kBooleanArrayClass)->GetIfTable());
@@ -1114,30 +1015,573 @@
mirror::Throwable::SetClass(GetClassRoot(kJavaLangThrowable));
mirror::StackTraceElement::SetClass(GetClassRoot(kJavaLangStackTraceElement));
- size_t class_tables_added = 0;
- for (gc::space::ImageSpace* space : spaces) {
- const ImageHeader& header = space->GetImageHeader();
- const ImageSection& section = header.GetImageSection(ImageHeader::kSectionClassTable);
- if (section.Size() > 0u) {
- WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
- ClassTable* const class_table = InsertClassTableForClassLoader(nullptr);
- class_table->ReadFromMemory(space->Begin() + section.Offset());
- ++class_tables_added;
+ for (gc::space::ImageSpace* image_space : spaces) {
+ // Boot class loader, use a null handle.
+ std::vector<std::unique_ptr<const DexFile>> dex_files;
+ if (!AddImageSpace(image_space,
+ ScopedNullHandle<mirror::ClassLoader>(),
+ /*dex_elements*/nullptr,
+ /*dex_location*/nullptr,
+ /*out*/&dex_files,
+ error_msg)) {
+ return false;
}
+ // Append opened dex files at the end.
+ boot_dex_files_.insert(boot_dex_files_.end(),
+ std::make_move_iterator(dex_files.begin()),
+ std::make_move_iterator(dex_files.end()));
}
- if (class_tables_added != 0) {
- // Either all of the image spaces have an empty class section or none do. In the case where
- // an image space has no classes, it will still have a non-empty class section that contains
- // metadata.
- CHECK_EQ(spaces.size(), class_tables_added)
- << "Expected non-empty class section for each image space.";
- dex_cache_boot_image_class_lookup_required_ = false;
- }
-
FinishInit(self);
- VLOG(startup) << "ClassLinker::InitFromImage exiting";
+ VLOG(startup) << __FUNCTION__ << " exiting";
+ return true;
+}
+static bool IsBootClassLoader(ScopedObjectAccessAlreadyRunnable& soa,
+ mirror::ClassLoader* class_loader)
+ SHARED_REQUIRES(Locks::mutator_lock_) {
+ return class_loader == nullptr ||
+ class_loader->GetClass() ==
+ soa.Decode<mirror::Class*>(WellKnownClasses::java_lang_BootClassLoader);
+}
+
+static mirror::String* GetDexPathListElementName(ScopedObjectAccessUnchecked& soa,
+ mirror::Object* element)
+ SHARED_REQUIRES(Locks::mutator_lock_) {
+ ArtField* const dex_file_field =
+ soa.DecodeField(WellKnownClasses::dalvik_system_DexPathList__Element_dexFile);
+ ArtField* const dex_file_name_field =
+ soa.DecodeField(WellKnownClasses::dalvik_system_DexFile_fileName);
+ DCHECK(dex_file_field != nullptr);
+ DCHECK(dex_file_name_field != nullptr);
+ DCHECK(element != nullptr);
+ CHECK_EQ(dex_file_field->GetDeclaringClass(), element->GetClass()) << PrettyTypeOf(element);
+ mirror::Object* dex_file = dex_file_field->GetObject(element);
+ if (dex_file == nullptr) {
+ return nullptr;
+ }
+ mirror::Object* const name_object = dex_file_name_field->GetObject(dex_file);
+ if (name_object != nullptr) {
+ return name_object->AsString();
+ }
+ return nullptr;
+}
+
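+// Flatten a PathClassLoader hierarchy into its dex file names in resolution order (parents
+// first). For example, if class loader C has dex elements [c1, c2] and its parent P (whose own
+// parent is the boot class loader) has [p1, p2], out_dex_file_names becomes [p1, p2, c1, c2].
+// Returns false for unsupported loader types or malformed path list elements.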
+static bool FlattenPathClassLoader(mirror::ClassLoader* class_loader,
+ std::list<mirror::String*>* out_dex_file_names,
+ std::string* error_msg)
+ SHARED_REQUIRES(Locks::mutator_lock_) {
+ DCHECK(out_dex_file_names != nullptr);
+ DCHECK(error_msg != nullptr);
+ ScopedObjectAccessUnchecked soa(Thread::Current());
+ ArtField* const dex_path_list_field =
+ soa.DecodeField(WellKnownClasses::dalvik_system_PathClassLoader_pathList);
+ ArtField* const dex_elements_field =
+ soa.DecodeField(WellKnownClasses::dalvik_system_DexPathList_dexElements);
+ CHECK(dex_path_list_field != nullptr);
+ CHECK(dex_elements_field != nullptr);
+ while (!IsBootClassLoader(soa, class_loader)) {
+ if (class_loader->GetClass() !=
+ soa.Decode<mirror::Class*>(WellKnownClasses::dalvik_system_PathClassLoader)) {
+ *error_msg = StringPrintf("Unknown class loader type %s", PrettyTypeOf(class_loader).c_str());
+ // Unsupported class loader.
+ return false;
+ }
+ mirror::Object* dex_path_list = dex_path_list_field->GetObject(class_loader);
+ if (dex_path_list != nullptr) {
+ // DexPathList has an array dexElements of Elements[] which each contain a dex file.
+ mirror::Object* dex_elements_obj = dex_elements_field->GetObject(dex_path_list);
+ // Loop through each dalvik.system.DexPathList$Element's dalvik.system.DexFile and look
+ // at the mCookie which is a DexFile vector.
+ if (dex_elements_obj != nullptr) {
+ mirror::ObjectArray<mirror::Object>* dex_elements =
+ dex_elements_obj->AsObjectArray<mirror::Object>();
+ // Reverse order since we insert the parent at the front.
+ for (int32_t i = dex_elements->GetLength() - 1; i >= 0; --i) {
+ mirror::Object* const element = dex_elements->GetWithoutChecks(i);
+ if (element == nullptr) {
+ *error_msg = StringPrintf("Null dex element at index %d", i);
+ return false;
+ }
+ mirror::String* const name = GetDexPathListElementName(soa, element);
+ if (name == nullptr) {
+ *error_msg = StringPrintf("Null name for dex element at index %d", i);
+ return false;
+ }
+ out_dex_file_names->push_front(name);
+ }
+ }
+ }
+ class_loader = class_loader->GetParent();
+ }
+ return true;
+}
+
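+// Patch each ArtMethod's dex cache array pointers after the arrays have been copied to the oat
+// file's .bss (see UpdateAppImageClassLoadersAndDexCaches): the old in-image array starts with a
+// forwarding pointer to the relocated copy, so the visitor dereferences that slot instead of
+// doing a slow map lookup.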
+class FixupArtMethodArrayVisitor : public ArtMethodVisitor {
+ public:
+ explicit FixupArtMethodArrayVisitor(const ImageHeader& header) : header_(header) {}
+
+ virtual void Visit(ArtMethod* method) SHARED_REQUIRES(Locks::mutator_lock_) {
+ GcRoot<mirror::Class>* resolved_types = method->GetDexCacheResolvedTypes(sizeof(void*));
+ const bool is_miranda = method->IsMiranda();
+ if (resolved_types != nullptr) {
+ bool in_image_space = false;
+ if (kIsDebugBuild || is_miranda) {
+ in_image_space = header_.GetImageSection(ImageHeader::kSectionDexCacheArrays).Contains(
+ reinterpret_cast<const uint8_t*>(resolved_types) - header_.GetImageBegin());
+ }
+ // Must be in image space for non-miranda method.
+ DCHECK(is_miranda || in_image_space)
+ << resolved_types << " is not in image starting at "
+ << reinterpret_cast<void*>(header_.GetImageBegin());
+ if (!is_miranda || in_image_space) {
+ // Go through the array so that we don't need to do a slow map lookup.
+ method->SetDexCacheResolvedTypes(*reinterpret_cast<GcRoot<mirror::Class>**>(resolved_types),
+ sizeof(void*));
+ }
+ }
+ ArtMethod** resolved_methods = method->GetDexCacheResolvedMethods(sizeof(void*));
+ if (resolved_methods != nullptr) {
+ bool in_image_space = false;
+ if (kIsDebugBuild || is_miranda) {
+ in_image_space = header_.GetImageSection(ImageHeader::kSectionDexCacheArrays).Contains(
+ reinterpret_cast<const uint8_t*>(resolved_methods) - header_.GetImageBegin());
+ }
+ // Must be in image space for non-miranda method.
+ DCHECK(is_miranda || in_image_space)
+ << resolved_methods << " is not in image starting at "
+ << reinterpret_cast<void*>(header_.GetImageBegin());
+ if (!is_miranda || in_image_space) {
+ // Go through the array so that we don't need to do a slow map lookup.
+ method->SetDexCacheResolvedMethods(*reinterpret_cast<ArtMethod***>(resolved_methods),
+ sizeof(void*));
+ }
+ }
+ }
+
+ private:
+ const ImageHeader& header_;
+};
+
+class VerifyClassInTableArtMethodVisitor : public ArtMethodVisitor {
+ public:
+ explicit VerifyClassInTableArtMethodVisitor(ClassTable* table) : table_(table) {}
+
+ virtual void Visit(ArtMethod* method)
+ SHARED_REQUIRES(Locks::mutator_lock_, Locks::classlinker_classes_lock_) {
+ mirror::Class* klass = method->GetDeclaringClass();
+ if (klass != nullptr && !Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass)) {
+ CHECK_EQ(table_->LookupByDescriptor(klass), klass) << PrettyClass(klass);
+ }
+ }
+
+ private:
+ ClassTable* const table_;
+};
+
+void ClassLinker::UpdateAppImageClassLoadersAndDexCaches(
+ gc::space::ImageSpace* space,
+ Handle<mirror::ClassLoader> class_loader,
+ Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches,
+ bool added_class_table) {
+ Thread* const self = Thread::Current();
+ gc::Heap* const heap = Runtime::Current()->GetHeap();
+ const ImageHeader& header = space->GetImageHeader();
+ // Add image classes into the class table for the class loader, and fixup the dex caches and
+ // class loader fields.
+ WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
+ ClassTable* table = InsertClassTableForClassLoader(class_loader.Get());
+ // TODO: Store class table in the image to avoid manually adding the classes.
+ for (int32_t i = 0, num_dex_caches = dex_caches->GetLength(); i < num_dex_caches; i++) {
+ mirror::DexCache* const dex_cache = dex_caches->Get(i);
+ const DexFile* const dex_file = dex_cache->GetDexFile();
+ // If the oat file expects the dex cache arrays to be in the BSS, then allocate there and
+ // copy over the arrays.
+ DCHECK(dex_file != nullptr);
+ const size_t num_strings = dex_file->NumStringIds();
+ const size_t num_types = dex_file->NumTypeIds();
+ const size_t num_methods = dex_file->NumMethodIds();
+ const size_t num_fields = dex_file->NumFieldIds();
+ CHECK_EQ(num_strings, dex_cache->NumStrings());
+ CHECK_EQ(num_types, dex_cache->NumResolvedTypes());
+ CHECK_EQ(num_methods, dex_cache->NumResolvedMethods());
+ CHECK_EQ(num_fields, dex_cache->NumResolvedFields());
+ if (dex_file->GetOatDexFile() != nullptr &&
+ dex_file->GetOatDexFile()->GetDexCacheArrays() != nullptr) {
+ DexCacheArraysLayout layout(image_pointer_size_, dex_file);
+ uint8_t* const raw_arrays = dex_file->GetOatDexFile()->GetDexCacheArrays();
+ // The space is not yet visible to the GC, so we can avoid the read barriers and use
+ // std::copy_n.
+ if (num_strings != 0u) {
+ GcRoot<mirror::String>* const strings =
+ reinterpret_cast<GcRoot<mirror::String>*>(raw_arrays + layout.StringsOffset());
+ for (size_t j = 0; kIsDebugBuild && j < num_strings; ++j) {
+ DCHECK(strings[j].IsNull());
+ }
+ std::copy_n(dex_cache->GetStrings(), num_strings, strings);
+ dex_cache->SetStrings(strings);
+ }
+
+ if (num_types != 0u) {
+ GcRoot<mirror::Class>* const image_resolved_types = dex_cache->GetResolvedTypes();
+ GcRoot<mirror::Class>* const types =
+ reinterpret_cast<GcRoot<mirror::Class>*>(raw_arrays + layout.TypesOffset());
+ for (size_t j = 0; kIsDebugBuild && j < num_types; ++j) {
+ DCHECK(types[j].IsNull());
+ }
+ std::copy_n(image_resolved_types, num_types, types);
+ // Store a pointer to the new location for fast ArtMethod patching without requiring map.
+ // This leaves random garbage at the start of the dex cache array, but nobody should ever
+ // read from it again.
+ *reinterpret_cast<GcRoot<mirror::Class>**>(image_resolved_types) = types;
+ dex_cache->SetResolvedTypes(types);
+ }
+ if (num_methods != 0u) {
+ ArtMethod** const methods = reinterpret_cast<ArtMethod**>(
+ raw_arrays + layout.MethodsOffset());
+ ArtMethod** const image_resolved_methods = dex_cache->GetResolvedMethods();
+ for (size_t j = 0; kIsDebugBuild && j < num_methods; ++j) {
+ DCHECK(methods[j] == nullptr);
+ }
+ std::copy_n(image_resolved_methods, num_methods, methods);
+ // Store a pointer to the new location for fast ArtMethod patching without requiring map.
+ *reinterpret_cast<ArtMethod***>(image_resolved_methods) = methods;
+ dex_cache->SetResolvedMethods(methods);
+ }
+ if (num_fields != 0u) {
+ ArtField** const fields = reinterpret_cast<ArtField**>(raw_arrays + layout.FieldsOffset());
+ for (size_t j = 0; kIsDebugBuild && j < num_fields; ++j) {
+ DCHECK(fields[j] == nullptr);
+ }
+ std::copy_n(dex_cache->GetResolvedFields(), num_fields, fields);
+ dex_cache->SetResolvedFields(fields);
+ }
+ }
+ {
+ WriterMutexLock mu2(self, dex_lock_);
+ // Make sure to do this after we update the arrays since we store the resolved types array
+ // in DexCacheData in RegisterDexFileLocked. We need the array pointer to be the one in the
+ // BSS.
+ mirror::DexCache* existing_dex_cache = FindDexCacheLocked(self,
+ *dex_file,
+ /*allow_failure*/true);
+ CHECK(existing_dex_cache == nullptr);
+ StackHandleScope<1> hs3(self);
+ RegisterDexFileLocked(*dex_file, hs3.NewHandle(dex_cache));
+ }
+ GcRoot<mirror::Class>* const types = dex_cache->GetResolvedTypes();
+ if (!added_class_table) {
+ for (int32_t j = 0; j < static_cast<int32_t>(num_types); j++) {
+ // The image space is not yet added to the heap, avoid read barriers.
+ mirror::Class* klass = types[j].Read<kWithoutReadBarrier>();
+ if (klass != nullptr) {
+ DCHECK_NE(klass->GetStatus(), mirror::Class::kStatusError);
+ // Update the class loader from the one in the image class loader to the one that loaded
+ // the app image.
+ klass->SetClassLoader(class_loader.Get());
+ // If there are multiple dex caches, there may be the same class multiple times
+ // in different dex caches. Check for this since inserting will add duplicates
+ // otherwise.
+ if (num_dex_caches > 1) {
+ mirror::Class* existing = table->LookupByDescriptor(klass);
+ if (existing != nullptr) {
+ DCHECK_EQ(existing, klass) << PrettyClass(klass);
+ } else {
+ table->Insert(klass);
+ }
+ } else {
+ table->Insert(klass);
+ }
+ // Double checked VLOG to avoid overhead.
+ if (VLOG_IS_ON(image)) {
+ VLOG(image) << PrettyClass(klass) << " " << klass->GetStatus();
+ if (!klass->IsArrayClass()) {
+ VLOG(image) << "From " << klass->GetDexCache()->GetDexFile()->GetBaseLocation();
+ }
+ VLOG(image) << "Direct methods";
+ for (ArtMethod& m : klass->GetDirectMethods(sizeof(void*))) {
+ VLOG(image) << PrettyMethod(&m);
+ }
+ VLOG(image) << "Virtual methods";
+ for (ArtMethod& m : klass->GetVirtualMethods(sizeof(void*))) {
+ VLOG(image) << PrettyMethod(&m);
+ }
+ }
+ }
+ }
+ }
+ if (kIsDebugBuild) {
+ for (int32_t j = 0; j < static_cast<int32_t>(num_types); j++) {
+ // The image space is not yet added to the heap, avoid read barriers.
+ mirror::Class* klass = types[j].Read<kWithoutReadBarrier>();
+ if (klass != nullptr) {
+ DCHECK_NE(klass->GetStatus(), mirror::Class::kStatusError);
+ if (kIsDebugBuild) {
+ DCHECK_EQ(table->LookupByDescriptor(klass), klass);
+ mirror::Class* super_class = klass->GetSuperClass();
+ if (super_class != nullptr && !heap->ObjectIsInBootImageSpace(super_class)) {
+ CHECK_EQ(table->LookupByDescriptor(super_class), super_class);
+ }
+ }
+ DCHECK_EQ(klass->GetClassLoader(), class_loader.Get());
+ if (kIsDebugBuild) {
+ for (ArtMethod& m : klass->GetDirectMethods(sizeof(void*))) {
+ const void* code = m.GetEntryPointFromQuickCompiledCode();
+ const void* oat_code = m.IsInvokable() ? GetQuickOatCodeFor(&m) : code;
+ if (!IsQuickResolutionStub(code) &&
+ !IsQuickGenericJniStub(code) &&
+ !IsQuickToInterpreterBridge(code) &&
+ !m.IsNative()) {
+ DCHECK_EQ(code, oat_code) << PrettyMethod(&m);
+ }
+ }
+ VLOG(image) << "Virtual methods";
+ for (ArtMethod& m : klass->GetVirtualMethods(sizeof(void*))) {
+ const void* code = m.GetEntryPointFromQuickCompiledCode();
+ const void* oat_code = m.IsInvokable() ? GetQuickOatCodeFor(&m) : code;
+ if (!IsQuickResolutionStub(code) &&
+ !IsQuickGenericJniStub(code) &&
+ !IsQuickToInterpreterBridge(code) &&
+ !m.IsNative()) {
+ DCHECK_EQ(code, oat_code) << PrettyMethod(&m);
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ {
+ FixupArtMethodArrayVisitor visitor(header);
+ header.GetImageSection(ImageHeader::kSectionArtMethods).VisitPackedArtMethods(
+ &visitor, space->Begin(), sizeof(void*));
+ Runtime::Current()->GetHeap()->WriteBarrierEveryFieldOf(class_loader.Get());
+ }
+ if (kIsDebugBuild) {
+ ClassTable* const class_table = class_loader.Get()->GetClassTable();
+ VerifyClassInTableArtMethodVisitor visitor2(class_table);
+ header.GetImageSection(ImageHeader::kSectionArtMethods).VisitPackedArtMethods(
+ &visitor2, space->Begin(), sizeof(void*));
+ }
+}
+
+bool ClassLinker::AddImageSpace(
+ gc::space::ImageSpace* space,
+ Handle<mirror::ClassLoader> class_loader,
+ jobjectArray dex_elements,
+ const char* dex_location,
+ std::vector<std::unique_ptr<const DexFile>>* out_dex_files,
+ std::string* error_msg) {
+ DCHECK(out_dex_files != nullptr);
+ DCHECK(error_msg != nullptr);
+ const uint64_t start_time = NanoTime();
+ const bool app_image = class_loader.Get() != nullptr;
+ const ImageHeader& header = space->GetImageHeader();
+ mirror::Object* dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
+ DCHECK(dex_caches_object != nullptr);
+ Runtime* const runtime = Runtime::Current();
+ gc::Heap* const heap = runtime->GetHeap();
+ Thread* const self = Thread::Current();
+ StackHandleScope<2> hs(self);
+ Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches(
+ hs.NewHandle(dex_caches_object->AsObjectArray<mirror::DexCache>()));
+ Handle<mirror::ObjectArray<mirror::Class>> class_roots(hs.NewHandle(
+ header.GetImageRoot(ImageHeader::kClassRoots)->AsObjectArray<mirror::Class>()));
+ const OatFile* oat_file = space->GetOatFile();
+ std::unordered_set<mirror::ClassLoader*> image_class_loaders;
+ // Check that the image is what we are expecting.
+ if (image_pointer_size_ != space->GetImageHeader().GetPointerSize()) {
+ *error_msg = StringPrintf("Application image pointer size does not match runtime: %zu vs %zu",
+ static_cast<size_t>(space->GetImageHeader().GetPointerSize()),
+ image_pointer_size_);
+ return false;
+ }
+ DCHECK(class_roots.Get() != nullptr);
+ if (class_roots->GetLength() != static_cast<int32_t>(kClassRootsMax)) {
+ *error_msg = StringPrintf("Expected %d class roots but got %d",
+ static_cast<int32_t>(kClassRootsMax),
+ class_roots->GetLength());
+ return false;
+ }
+ // Check against existing class roots to make sure they match the ones in the boot image.
+ for (size_t i = 0; i < kClassRootsMax; i++) {
+ if (class_roots->Get(i) != GetClassRoot(static_cast<ClassRoot>(i))) {
+ *error_msg = "App image class roots must have pointer equality with runtime ones.";
+ return false;
+ }
+ }
+ if (oat_file->GetOatHeader().GetDexFileCount() !=
+ static_cast<uint32_t>(dex_caches->GetLength())) {
+ *error_msg = "Dex cache count and dex file count mismatch while trying to initialize from "
+ "image";
+ return false;
+ }
+
+ StackHandleScope<1> hs2(self);
+ MutableHandle<mirror::DexCache> h_dex_cache(hs2.NewHandle<mirror::DexCache>(nullptr));
+ for (int32_t i = 0; i < dex_caches->GetLength(); i++) {
+ h_dex_cache.Assign(dex_caches->Get(i));
+ std::string dex_file_location(h_dex_cache->GetLocation()->ToModifiedUtf8());
+ // TODO: Only store qualified paths.
+ // If non qualified, qualify it.
+ if (dex_file_location.find('/') == std::string::npos) {
+ std::string dex_location_path = dex_location;
+ const size_t pos = dex_location_path.find_last_of('/');
+ CHECK_NE(pos, std::string::npos);
+ dex_location_path = dex_location_path.substr(0, pos + 1); // Keep trailing '/'
+ dex_file_location = dex_location_path + dex_file_location;
+ }
+ const OatFile::OatDexFile* oat_dex_file = oat_file->GetOatDexFile(dex_file_location.c_str(),
+ nullptr);
+ if (oat_dex_file == nullptr) {
+ *error_msg = StringPrintf("Failed finding oat dex file for %s %s",
+ oat_file->GetLocation().c_str(),
+ dex_file_location.c_str());
+ return false;
+ }
+ std::string inner_error_msg;
+ std::unique_ptr<const DexFile> dex_file = oat_dex_file->OpenDexFile(&inner_error_msg);
+ if (dex_file == nullptr) {
+ *error_msg = StringPrintf("Failed to open dex file %s from within oat file %s error '%s'",
+ dex_file_location.c_str(),
+ oat_file->GetLocation().c_str(),
+ inner_error_msg.c_str());
+ return false;
+ }
+
+ if (dex_file->GetLocationChecksum() != oat_dex_file->GetDexFileLocationChecksum()) {
+ *error_msg = StringPrintf("Checksums do not match for %s: %x vs %x",
+ dex_file_location.c_str(),
+ dex_file->GetLocationChecksum(),
+ oat_dex_file->GetDexFileLocationChecksum());
+ return false;
+ }
+
+ if (app_image) {
+ // The current dex file field is bogus, overwrite it so that we can get the dex file in the
+ // loop below.
+ h_dex_cache->SetDexFile(dex_file.get());
+ // Check that each class loader resolved the same way.
+ // TODO: Store image class loaders as image roots.
+ GcRoot<mirror::Class>* const types = h_dex_cache->GetResolvedTypes();
+ for (int32_t j = 0, num_types = h_dex_cache->NumResolvedTypes(); j < num_types; j++) {
+ mirror::Class* klass = types[j].Read();
+ if (klass != nullptr) {
+ DCHECK_NE(klass->GetStatus(), mirror::Class::kStatusError);
+ mirror::ClassLoader* image_class_loader = klass->GetClassLoader();
+ image_class_loaders.insert(image_class_loader);
+ }
+ }
+ } else {
+ if (kSanityCheckObjects) {
+ SanityCheckArtMethodPointerArray(h_dex_cache->GetResolvedMethods(),
+ h_dex_cache->NumResolvedMethods(),
+ image_pointer_size_,
+ heap->GetBootImageSpaces());
+ }
+ // Register dex files, keep track of existing ones that are conflicts.
+ AppendToBootClassPath(*dex_file.get(), h_dex_cache);
+ }
+ out_dex_files->push_back(std::move(dex_file));
+ }
+
+ if (app_image) {
+ ScopedObjectAccessUnchecked soa(Thread::Current());
+ // Check that the class loader resolves the same way as the ones in the image.
+ // Image class loader [A][B][C][image dex files]
+ // Class loader = [???][dex_elements][image dex files]
+ // Need to ensure that [???][dex_elements] == [A][B][C].
+ // For each class loader (PathClassLoader), the loader checks the parent first. Also the logic
+ // for PathClassLoader does this by looping through the array of dex files. To ensure they
+ // resolve the same way, simply flatten the hierarchy in the resolution order and check that
+ // the dex file names are the same.
+ for (mirror::ClassLoader* image_class_loader : image_class_loaders) {
+ std::list<mirror::String*> image_dex_file_names;
+ std::string temp_error_msg;
+ if (!FlattenPathClassLoader(image_class_loader, &image_dex_file_names, &temp_error_msg)) {
+ *error_msg = StringPrintf("Failed to flatten image class loader hierarchy '%s'",
+ temp_error_msg.c_str());
+ return false;
+ }
+ std::list<mirror::String*> loader_dex_file_names;
+ if (!FlattenPathClassLoader(class_loader.Get(), &loader_dex_file_names, &temp_error_msg)) {
+ *error_msg = StringPrintf("Failed to flatten class loader hierarchy '%s'",
+ temp_error_msg.c_str());
+ return false;
+ }
+ // Add the temporary dex path list elements at the end.
+ auto* elements = soa.Decode<mirror::ObjectArray<mirror::Object>*>(dex_elements);
+ for (size_t i = 0, num_elems = elements->GetLength(); i < num_elems; ++i) {
+ mirror::Object* element = elements->GetWithoutChecks(i);
+ if (element != nullptr) {
+ // If we are somewhere in the middle of the array, there may be nulls at the end.
+ loader_dex_file_names.push_back(GetDexPathListElementName(soa, element));
+ }
+ }
+ // Ignore the number of image dex files since we are adding those to the class loader anyway.
+ CHECK_GE(static_cast<size_t>(image_dex_file_names.size()),
+ static_cast<size_t>(dex_caches->GetLength()));
+ size_t image_count = image_dex_file_names.size() - dex_caches->GetLength();
+ // Check that the dex file names match.
+ bool equal = image_count == loader_dex_file_names.size();
+ if (equal) {
+ auto it1 = image_dex_file_names.begin();
+ auto it2 = loader_dex_file_names.begin();
+ for (size_t i = 0; equal && i < image_count; ++i, ++it1, ++it2) {
+ equal = equal && (*it1)->Equals(*it2);
+ }
+ }
+ if (!equal) {
+ *error_msg = "Rejecting application image due to class loader mismatch";
+ return false;
+ }
+ }
+ }
+
+ if (kSanityCheckObjects) {
+ for (int32_t i = 0; i < dex_caches->GetLength(); i++) {
+ auto* dex_cache = dex_caches->Get(i);
+ for (size_t j = 0; j < dex_cache->NumResolvedFields(); ++j) {
+ auto* field = dex_cache->GetResolvedField(j, image_pointer_size_);
+ if (field != nullptr) {
+ CHECK(field->GetDeclaringClass()->GetClass() != nullptr);
+ }
+ }
+ }
+ if (!app_image) {
+ heap->VisitObjects(SanityCheckObjectsCallback, nullptr);
+ }
+ }
+
+ // Set entry point to interpreter if in InterpretOnly mode.
+ if (!runtime->IsAotCompiler() && runtime->GetInstrumentation()->InterpretOnly()) {
+ const ImageSection& methods = header.GetMethodsSection();
+ SetInterpreterEntrypointArtMethodVisitor visitor(image_pointer_size_);
+ methods.VisitPackedArtMethods(&visitor, space->Begin(), image_pointer_size_);
+ }
+
+ const ImageSection& class_table_section = header.GetImageSection(ImageHeader::kSectionClassTable);
+ bool added_class_table = false;
+ if (app_image) {
+ GetOrCreateAllocatorForClassLoader(class_loader.Get()); // Make sure we have a linear alloc.
+ }
+ if (class_table_section.Size() > 0u) {
+ const uint64_t start_time2 = NanoTime();
+ WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
+ ClassTable* const class_table = InsertClassTableForClassLoader(class_loader.Get());
+ class_table->ReadFromMemory(space->Begin() + class_table_section.Offset());
+ if (app_image) {
+ class_table->SetClassLoader(class_loader.Get());
+ } else {
+ dex_cache_boot_image_class_lookup_required_ = false;
+ }
+ VLOG(image) << "Adding class table classes took " << PrettyDuration(NanoTime() - start_time2);
+ added_class_table = true;
+ }
+ if (app_image) {
+ UpdateAppImageClassLoadersAndDexCaches(space, class_loader, dex_caches, added_class_table);
+ }
+ VLOG(class_linker) << "Adding image space took " << PrettyDuration(NanoTime() - start_time);
return true;
}
@@ -1527,14 +1971,6 @@
return ClassPathEntry(nullptr, nullptr);
}
-static bool IsBootClassLoader(ScopedObjectAccessAlreadyRunnable& soa,
- mirror::ClassLoader* class_loader)
- SHARED_REQUIRES(Locks::mutator_lock_) {
- return class_loader == nullptr ||
- class_loader->GetClass() ==
- soa.Decode<mirror::Class*>(WellKnownClasses::java_lang_BootClassLoader);
-}
-
bool ClassLinker::FindClassInPathClassLoader(ScopedObjectAccessAlreadyRunnable& soa,
Thread* self,
const char* descriptor,
@@ -1820,6 +2256,7 @@
// inserted before we allocate / fill in these fields.
LoadClass(self, dex_file, dex_class_def, klass);
if (self->IsExceptionPending()) {
+ VLOG(class_linker) << self->GetException()->Dump();
// An exception occured during load, set status to erroneous while holding klass' lock in case
// notification is necessary.
if (!klass->IsErroneous()) {
@@ -2487,7 +2924,20 @@
Thread* const self = Thread::Current();
dex_lock_.AssertExclusiveHeld(self);
CHECK(dex_cache.Get() != nullptr) << dex_file.GetLocation();
- CHECK(dex_cache->GetLocation()->Equals(dex_file.GetLocation()))
+ // For app images, the dex cache location may be a suffix of the dex file location since the
+ // dex file location is an absolute path.
+ const size_t dex_cache_length = dex_cache->GetLocation()->GetLength();
+ CHECK_GT(dex_cache_length, 0u) << dex_file.GetLocation();
+ std::string dex_file_location = dex_file.GetLocation();
+ CHECK_GE(dex_file_location.length(), dex_cache_length)
+ << dex_cache->GetLocation()->ToModifiedUtf8() << " " << dex_file.GetLocation();
+ // Take suffix.
+ const std::string dex_file_suffix = dex_file_location.substr(
+ dex_file_location.length() - dex_cache_length,
+ dex_cache_length);
+ // Example dex_cache location is SettingsProvider.apk and
+ // dex file location is /system/priv-app/SettingsProvider/SettingsProvider.apk
+ CHECK(dex_cache->GetLocation()->Equals(dex_file_suffix))
<< dex_cache->GetLocation()->ToModifiedUtf8() << " " << dex_file.GetLocation();
// Clean up pass to remove null dex caches.
// Null dex caches can occur due to class unloading and we are lazily removing null entries.
@@ -6931,10 +7381,13 @@
ArtField* cookie_field = soa.DecodeField(WellKnownClasses::dalvik_system_DexFile_cookie);
DCHECK_EQ(cookie_field->GetDeclaringClass(), element_file_field->GetType<false>());
+ ArtField* file_name_field = soa.DecodeField(WellKnownClasses::dalvik_system_DexFile_fileName);
+ DCHECK_EQ(file_name_field->GetDeclaringClass(), element_file_field->GetType<false>());
+
// Fill the elements array.
int32_t index = 0;
for (const DexFile* dex_file : dex_files) {
- StackHandleScope<3> hs2(self);
+ StackHandleScope<4> hs2(self);
// CreatePathClassLoader is only used by gtests. Index 0 of h_long_array is supposed to be the
// oat file but we can leave it null.
@@ -6949,6 +7402,11 @@
DCHECK(h_dex_file.Get() != nullptr);
cookie_field->SetObject<false>(h_dex_file.Get(), h_long_array.Get());
+ Handle<mirror::String> h_file_name = hs2.NewHandle(
+ mirror::String::AllocFromModifiedUtf8(self, dex_file->GetLocation().c_str()));
+ DCHECK(h_file_name.Get() != nullptr);
+ file_name_field->SetObject<false>(h_dex_file.Get(), h_file_name.Get());
+
Handle<mirror::Object> h_element = hs2.NewHandle(h_dex_element_class->AllocObject(self));
DCHECK(h_element.Get() != nullptr);
element_file_field->SetObject<false>(h_element.Get(), h_dex_file.Get());
@@ -7048,6 +7506,7 @@
if (class_loader != nullptr) {
++it;
} else {
+ VLOG(class_linker) << "Freeing class loader";
DeleteClassLoader(self, data);
it = class_loaders_.erase(it);
}
diff --git a/runtime/class_linker.h b/runtime/class_linker.h
index f1fd0c3..d503dd4 100644
--- a/runtime/class_linker.h
+++ b/runtime/class_linker.h
@@ -120,11 +120,25 @@
SHARED_REQUIRES(Locks::mutator_lock_)
REQUIRES(!dex_lock_);
- // Initialize class linker from one or more images.
- bool InitFromImage(std::string* error_msg)
+ // Initialize class linker from one or more boot images.
+ bool InitFromBootImage(std::string* error_msg)
SHARED_REQUIRES(Locks::mutator_lock_)
REQUIRES(!dex_lock_);
+ // Add an image space to the class linker; may fix up class loader fields and dex cache fields.
+ // The dex files that were newly opened for the space are placed in the out argument
+ // out_dex_files. Returns true if the operation succeeded.
+ // The space must already be added to the heap before calling AddImageSpace since we need to
+ // properly handle read barriers and object marking.
+ bool AddImageSpace(gc::space::ImageSpace* space,
+ Handle<mirror::ClassLoader> class_loader,
+ jobjectArray dex_elements,
+ const char* dex_location,
+ std::vector<std::unique_ptr<const DexFile>>* out_dex_files,
+ std::string* error_msg)
+ REQUIRES(!dex_lock_)
+ SHARED_REQUIRES(Locks::mutator_lock_);
+
// Finds a class by its descriptor, loading it if necessary.
// If class_loader is null, searches boot_class_path_.
mirror::Class* FindClass(Thread* self,
@@ -985,8 +999,16 @@
SHARED_REQUIRES(Locks::mutator_lock_)
REQUIRES(!Locks::classlinker_classes_lock_);
+ void UpdateAppImageClassLoadersAndDexCaches(
+ gc::space::ImageSpace* space,
+ Handle<mirror::ClassLoader> class_loader,
+ Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches,
+ bool added_class_table)
+ REQUIRES(!dex_lock_)
+ SHARED_REQUIRES(Locks::mutator_lock_);
+
std::vector<const DexFile*> boot_class_path_;
- std::vector<std::unique_ptr<const DexFile>> opened_dex_files_;
+ std::vector<std::unique_ptr<const DexFile>> boot_dex_files_;
mutable ReaderWriterMutex dex_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
// JNI weak globals and side data to allow dex caches to get unloaded. We lazily delete weak
diff --git a/runtime/class_table.cc b/runtime/class_table.cc
index df2dbf4..2a4f0e01 100644
--- a/runtime/class_table.cc
+++ b/runtime/class_table.cc
@@ -40,6 +40,16 @@
return false;
}
+mirror::Class* ClassTable::LookupByDescriptor(mirror::Class* klass) {
+ for (ClassSet& class_set : classes_) {
+ auto it = class_set.Find(GcRoot<mirror::Class>(klass));
+ if (it != class_set.end()) {
+ return it->Read();
+ }
+ }
+ return nullptr;
+}
+
mirror::Class* ClassTable::UpdateClass(const char* descriptor, mirror::Class* klass, size_t hash) {
// Should only be updating latest table.
auto existing_it = classes_.back().FindWithHash(descriptor, hash);
@@ -173,4 +183,12 @@
return read_count;
}
+void ClassTable::SetClassLoader(mirror::ClassLoader* class_loader) {
+ for (const ClassSet& class_set : classes_) {
+ for (const GcRoot<mirror::Class>& root : class_set) {
+ root.Read()->SetClassLoader(class_loader);
+ }
+ }
+}
+
} // namespace art
diff --git a/runtime/class_table.h b/runtime/class_table.h
index 911f3c2..0b42035 100644
--- a/runtime/class_table.h
+++ b/runtime/class_table.h
@@ -84,9 +84,14 @@
bool Visit(ClassVisitor* visitor)
SHARED_REQUIRES(Locks::classlinker_classes_lock_, Locks::mutator_lock_);
+ // Return the first class that matches the descriptor. Returns null if there are none.
mirror::Class* Lookup(const char* descriptor, size_t hash)
SHARED_REQUIRES(Locks::classlinker_classes_lock_, Locks::mutator_lock_);
+ // Return the first class that matches the descriptor of klass. Returns null if there are none.
+ mirror::Class* LookupByDescriptor(mirror::Class* klass)
+ SHARED_REQUIRES(Locks::classlinker_classes_lock_, Locks::mutator_lock_);
+
void Insert(mirror::Class* klass)
REQUIRES(Locks::classlinker_classes_lock_)
SHARED_REQUIRES(Locks::mutator_lock_);
@@ -107,10 +112,17 @@
// Combines all of the tables into one class set.
size_t WriteToMemory(uint8_t* ptr) const
SHARED_REQUIRES(Locks::classlinker_classes_lock_, Locks::mutator_lock_);
+
+ // Read a table from ptr and put it at the front of the class set.
size_t ReadFromMemory(uint8_t* ptr)
REQUIRES(Locks::classlinker_classes_lock_)
SHARED_REQUIRES(Locks::mutator_lock_);
+ // Change the class loader of all the contained classes.
+ void SetClassLoader(mirror::ClassLoader* class_loader)
+ REQUIRES(Locks::classlinker_classes_lock_)
+ SHARED_REQUIRES(Locks::mutator_lock_);
+
private:
class ClassDescriptorHashEquals {
public:
diff --git a/runtime/gc/accounting/card_table.h b/runtime/gc/accounting/card_table.h
index 88a6c6c..b6af908 100644
--- a/runtime/gc/accounting/card_table.h
+++ b/runtime/gc/accounting/card_table.h
@@ -115,6 +115,8 @@
// Resets all of the bytes in the card table to clean.
void ClearCardTable();
+
+ // Clear a range of cards that covers start to end; start and end must be aligned to kCardSize.
void ClearCardRange(uint8_t* start, uint8_t* end);
// Resets all of the bytes in the card table which do not map to the image space.
diff --git a/runtime/gc/accounting/space_bitmap-inl.h b/runtime/gc/accounting/space_bitmap-inl.h
index 61c67f8..4cf5b4f 100644
--- a/runtime/gc/accounting/space_bitmap-inl.h
+++ b/runtime/gc/accounting/space_bitmap-inl.h
@@ -167,8 +167,12 @@
uintptr_t* address = &bitmap_begin_[index];
uintptr_t old_word = *address;
if (kSetBit) {
+ // Check the bit before setting the word in case we are trying to mark a read only bitmap
+ // like an image space bitmap. This bitmap is mapped as read only and will fault if we
+ // attempt to change any words. Since all of the objects are marked, this will never
+ // occur if we check before setting the bit. This also prevents dirty pages that would
+ // occur if the bitmap was read write and we did not check the bit.
if ((old_word & mask) == 0) {
- // Avoid dirtying the page if possible.
*address = old_word | mask;
}
} else {
diff --git a/runtime/gc/collector/immune_spaces_test.cc b/runtime/gc/collector/immune_spaces_test.cc
index 4884e66..ea290dd 100644
--- a/runtime/gc/collector/immune_spaces_test.cc
+++ b/runtime/gc/collector/immune_spaces_test.cc
@@ -112,8 +112,13 @@
/*oat_data_begin*/PointerToLowMemUInt32(map->End()),
/*oat_data_end*/PointerToLowMemUInt32(map->End() + oat_size),
/*oat_file_end*/PointerToLowMemUInt32(map->End() + oat_size),
+ /*boot_image_begin*/0u,
+ /*boot_image_size*/0u,
+ /*boot_oat_begin*/0u,
+ /*boot_oat_size*/0u,
/*pointer_size*/sizeof(void*),
/*compile_pic*/false,
+ /*is_pic*/false,
ImageHeader::kStorageModeUncompressed,
/*storage_size*/0u);
return new DummyImageSpace(map.release(), live_bitmap.release());
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index 2fb5e34..8cd8d73 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -273,10 +273,11 @@
std::string& image_name = image_file_names[index];
ATRACE_BEGIN("ImageSpace::Create");
std::string error_msg;
- space::ImageSpace* boot_image_space = space::ImageSpace::Create(image_name.c_str(),
- image_instruction_set,
- index > 0,
- &error_msg);
+ space::ImageSpace* boot_image_space = space::ImageSpace::CreateBootImage(
+ image_name.c_str(),
+ image_instruction_set,
+ index > 0,
+ &error_msg);
ATRACE_END();
if (boot_image_space != nullptr) {
AddSpace(boot_image_space);
@@ -491,7 +492,15 @@
ATRACE_END();
// Allocate the card table.
ATRACE_BEGIN("Create card table");
- card_table_.reset(accounting::CardTable::Create(heap_begin, heap_capacity));
+ // We currently don't support dynamically resizing the card table.
+ // Since we don't know where in the low_4gb the app image will be located, make the card table
+ // cover the whole low_4gb. TODO: Extend the card table in AddSpace.
+ UNUSED(heap_capacity);
+ // Start at 4 KB (kMinHeapAddress); we can be sure there are no spaces mapped this low since
+ // the address range is reserved by the kernel.
+ static constexpr size_t kMinHeapAddress = 4 * KB;
+ card_table_.reset(accounting::CardTable::Create(reinterpret_cast<uint8_t*>(kMinHeapAddress),
+ 4 * GB - kMinHeapAddress));
CHECK(card_table_.get() != nullptr) << "Failed to create card table";
ATRACE_END();
if (foreground_collector_type_ == kCollectorTypeCC && kUseTableLookupReadBarrier) {
@@ -1252,10 +1261,6 @@
return FindDiscontinuousSpaceFromObject(obj, fail_ok);
}
-std::vector<space::ImageSpace*> Heap::GetBootImageSpaces() const {
- return boot_image_spaces_;
-}
-
void Heap::ThrowOutOfMemoryError(Thread* self, size_t byte_count, AllocatorType allocator_type) {
std::ostringstream oss;
size_t total_bytes_free = GetFreeMemory();
@@ -3194,7 +3199,13 @@
} else if (process_alloc_space_cards) {
TimingLogger::ScopedTiming t2("AllocSpaceClearCards", timings);
if (clear_alloc_space_cards) {
- card_table_->ClearCardRange(space->Begin(), space->End());
+ uint8_t* end = space->End();
+ if (space->IsImageSpace()) {
+ // Image space end is the end of the mirror objects; it is not necessarily page or card
+ // aligned. Align up so that the check in ClearCardRange does not fail.
+ end = AlignUp(end, accounting::CardTable::kCardSize);
+ }
+ card_table_->ClearCardRange(space->Begin(), end);
} else {
// No mod union table for the AllocSpace. Age the cards so that the GC knows that these
// cards were dirty before the GC started.
@@ -3989,5 +4000,43 @@
gc_disabled_for_shutdown_ = true;
}
+bool Heap::ObjectIsInBootImageSpace(mirror::Object* obj) const {
+ for (gc::space::ImageSpace* space : boot_image_spaces_) {
+ if (space->HasAddress(obj)) {
+ return true;
+ }
+ }
+ return false;
+}
+
+void Heap::GetBootImagesSize(uint32_t* boot_image_begin,
+ uint32_t* boot_image_end,
+ uint32_t* boot_oat_begin,
+ uint32_t* boot_oat_end) {
+ DCHECK(boot_image_begin != nullptr);
+ DCHECK(boot_image_end != nullptr);
+ DCHECK(boot_oat_begin != nullptr);
+ DCHECK(boot_oat_end != nullptr);
+ *boot_image_begin = 0u;
+ *boot_image_end = 0u;
+ *boot_oat_begin = 0u;
+ *boot_oat_end = 0u;
+ for (gc::space::ImageSpace* space_ : GetBootImageSpaces()) {
+ const uint32_t image_begin = PointerToLowMemUInt32(space_->Begin());
+ const uint32_t image_size = space_->GetImageHeader().GetImageSize();
+ if (*boot_image_begin == 0 || image_begin < *boot_image_begin) {
+ *boot_image_begin = image_begin;
+ }
+ *boot_image_end = std::max(*boot_image_end, image_begin + image_size);
+ const OatFile* boot_oat_file = space_->GetOatFile();
+ const uint32_t oat_begin = PointerToLowMemUInt32(boot_oat_file->Begin());
+ const uint32_t oat_size = boot_oat_file->Size();
+ if (*boot_oat_begin == 0 || oat_begin < *boot_oat_begin) {
+ *boot_oat_begin = oat_begin;
+ }
+ *boot_oat_end = std::max(*boot_oat_end, oat_begin + oat_size);
+ }
+}
+
} // namespace gc
} // namespace art
diff --git a/runtime/gc/heap.h b/runtime/gc/heap.h
index 7b531ba..1b7e2c9 100644
--- a/runtime/gc/heap.h
+++ b/runtime/gc/heap.h
@@ -580,7 +580,17 @@
void UnBindBitmaps() REQUIRES(Locks::heap_bitmap_lock_);
// Returns the boot image spaces. There may be multiple boot image spaces.
- std::vector<space::ImageSpace*> GetBootImageSpaces() const;
+ const std::vector<space::ImageSpace*>& GetBootImageSpaces() const {
+ return boot_image_spaces_;
+ }
+
+ bool ObjectIsInBootImageSpace(mirror::Object* obj) const
+ SHARED_REQUIRES(Locks::mutator_lock_);
+
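+ // Compute the bounding [begin, end) ranges that cover all boot image spaces and their oat
+ // files, as low-memory 32 bit addresses.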
+ void GetBootImagesSize(uint32_t* boot_image_begin,
+ uint32_t* boot_image_end,
+ uint32_t* boot_oat_begin,
+ uint32_t* boot_oat_end);
// Permenantly disable moving garbage collection.
void DisableMovingGc() REQUIRES(!*gc_complete_lock_);
diff --git a/runtime/gc/space/image_space.cc b/runtime/gc/space/image_space.cc
index 5f6bb8e..9ff3d8d 100644
--- a/runtime/gc/space/image_space.cc
+++ b/runtime/gc/space/image_space.cc
@@ -475,10 +475,10 @@
return true;
}
-ImageSpace* ImageSpace::Create(const char* image_location,
- const InstructionSet image_isa,
- bool secondary_image,
- std::string* error_msg) {
+ImageSpace* ImageSpace::CreateBootImage(const char* image_location,
+ const InstructionSet image_isa,
+ bool secondary_image,
+ std::string* error_msg) {
std::string system_filename;
bool has_system = false;
std::string cache_filename;
@@ -584,8 +584,13 @@
// assume this if we are using a relocated image (i.e. image checksum
// matches) since this is only different by the offset. We need this to
// make sure that host tests continue to work.
- space = ImageSpace::Init(image_filename->c_str(), image_location,
- !(is_system || relocated_version_used), error_msg);
+ // Since we are the boot image, pass null since we load the oat file from the boot image oat
+ // file name.
+ space = ImageSpace::Init(image_filename->c_str(),
+ image_location,
+ !(is_system || relocated_version_used),
+ /* oat_file */nullptr,
+ error_msg);
}
if (space != nullptr) {
return space;
@@ -646,7 +651,7 @@
// we leave Create.
ScopedFlock image_lock;
image_lock.Init(cache_filename.c_str(), error_msg);
- space = ImageSpace::Init(cache_filename.c_str(), image_location, true, error_msg);
+ space = ImageSpace::Init(cache_filename.c_str(), image_location, true, nullptr, error_msg);
if (space == nullptr) {
*error_msg = StringPrintf("Failed to load generated image '%s': %s",
cache_filename.c_str(), error_msg->c_str());
@@ -669,34 +674,494 @@
}
}
-ImageSpace* ImageSpace::Init(const char* image_filename, const char* image_location,
- bool validate_oat_file, std::string* error_msg) {
+// Helper class for relocating from one range of memory to another.
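+// For example, RelocationRange(/*source*/ 0x1000, /*dest*/ 0x5000, /*length*/ 0x100) maps source
+// addresses [0x1000, 0x1100) to [0x5000, 0x5100): ContainsSource(0x1050) is true and
+// ToDest(0x1050) returns 0x5050.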
+class RelocationRange {
+ public:
+ RelocationRange() = default;
+ RelocationRange(const RelocationRange&) = default;
+ RelocationRange(uintptr_t source, uintptr_t dest, uintptr_t length)
+ : source_(source),
+ dest_(dest),
+ length_(length) {}
+
+ bool ContainsSource(uintptr_t address) const {
+ return address - source_ < length_;
+ }
+
+ // Translate a source address to the destination space.
+ uintptr_t ToDest(uintptr_t address) const {
+ DCHECK(ContainsSource(address));
+ return address + Delta();
+ }
+
+ // Returns the delta between the dest from the source.
+ off_t Delta() const {
+ return dest_ - source_;
+ }
+
+ uintptr_t Source() const {
+ return source_;
+ }
+
+ uintptr_t Dest() const {
+ return dest_;
+ }
+
+ uintptr_t Length() const {
+ return length_;
+ }
+
+ private:
+ const uintptr_t source_;
+ const uintptr_t dest_;
+ const uintptr_t length_;
+};
+
+class FixupVisitor : public ValueObject {
+ public:
+ FixupVisitor(const RelocationRange& boot_image,
+ const RelocationRange& boot_oat,
+ const RelocationRange& app_image,
+ const RelocationRange& app_oat)
+ : boot_image_(boot_image),
+ boot_oat_(boot_oat),
+ app_image_(app_image),
+ app_oat_(app_oat) {}
+
+ // Return the relocated address of a heap object.
+ template <typename T>
+ ALWAYS_INLINE T* ForwardObject(T* src) const {
+ const uintptr_t uint_src = reinterpret_cast<uintptr_t>(src);
+ if (boot_image_.ContainsSource(uint_src)) {
+ return reinterpret_cast<T*>(boot_image_.ToDest(uint_src));
+ }
+ if (app_image_.ContainsSource(uint_src)) {
+ return reinterpret_cast<T*>(app_image_.ToDest(uint_src));
+ }
+ return src;
+ }
+
+ // Return the relocated address of a code pointer (contained by an oat file).
+ ALWAYS_INLINE const void* ForwardCode(const void* src) const {
+ const uintptr_t uint_src = reinterpret_cast<uintptr_t>(src);
+ if (boot_oat_.ContainsSource(uint_src)) {
+ return reinterpret_cast<const void*>(boot_oat_.ToDest(uint_src));
+ }
+ if (app_oat_.ContainsSource(uint_src)) {
+ return reinterpret_cast<const void*>(app_oat_.ToDest(uint_src));
+ }
+ return src;
+ }
+
+ protected:
+ // Source section.
+ const RelocationRange boot_image_;
+ const RelocationRange boot_oat_;
+ const RelocationRange app_image_;
+ const RelocationRange app_oat_;
+};
+
+std::ostream& operator<<(std::ostream& os, const RelocationRange& reloc) {
+ return os << "(" << reinterpret_cast<const void*>(reloc.Source()) << "-"
+ << reinterpret_cast<const void*>(reloc.Source() + reloc.Length()) << ")->("
+ << reinterpret_cast<const void*>(reloc.Dest()) << "-"
+ << reinterpret_cast<const void*>(reloc.Dest() + reloc.Length()) << ")";
+}
+
+// Adapt for mirror::Class::FixupNativePointers.
+class FixupObjectAdapter : public FixupVisitor {
+ public:
+ template<typename... Args>
+ explicit FixupObjectAdapter(Args... args) : FixupVisitor(args...) {}
+
+ template <typename T>
+ T* operator()(T* obj) const {
+ return ForwardObject(obj);
+ }
+};
+
+class FixupClassVisitor : public FixupVisitor {
+ public:
+ template<typename... Args>
+ explicit FixupClassVisitor(Args... args) : FixupVisitor(args...) {}
+
+  // The image space is not yet added to the heap, so the GC doesn't need to know about it. Avoid
+  // requiring the mutator lock to prevent possible pauses.
+ ALWAYS_INLINE void operator()(mirror::Object* obj) const NO_THREAD_SAFETY_ANALYSIS {
+ mirror::Class* klass = obj->GetClass<kVerifyNone, kWithoutReadBarrier>();
+ DCHECK(klass != nullptr) << "Null class in image";
+ // No AsClass since our fields aren't quite fixed up yet.
+ mirror::Class* new_klass = down_cast<mirror::Class*>(ForwardObject(klass));
+ // Keep clean if possible.
+ if (klass != new_klass) {
+ obj->SetClass<kVerifyNone>(new_klass);
+ }
+ }
+};
+
+class FixupRootVisitor : public FixupVisitor {
+ public:
+ template<typename... Args>
+ explicit FixupRootVisitor(Args... args) : FixupVisitor(args...) {}
+
+ ALWAYS_INLINE void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
+ SHARED_REQUIRES(Locks::mutator_lock_) {
+ if (!root->IsNull()) {
+ VisitRoot(root);
+ }
+ }
+
+ ALWAYS_INLINE void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
+ SHARED_REQUIRES(Locks::mutator_lock_) {
+ mirror::Object* ref = root->AsMirrorPtr();
+ mirror::Object* new_ref = ForwardObject(ref);
+ if (ref != new_ref) {
+ root->Assign(new_ref);
+ }
+ }
+};
+
+class FixupObjectVisitor : public FixupVisitor {
+ public:
+ template<typename... Args>
+ explicit FixupObjectVisitor(Args... args) : FixupVisitor(args...) {}
+
+  // Native roots (ArtMethods and ArtFields) are fixed up separately, since we also need to fix
+  // up the method entrypoints.
+ ALWAYS_INLINE void VisitRootIfNonNull(
+ mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
+
+ ALWAYS_INLINE void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
+ const {}
+
+ ALWAYS_INLINE void operator()(mirror::Object* obj,
+ MemberOffset offset,
+ bool is_static ATTRIBUTE_UNUSED) const
+ NO_THREAD_SAFETY_ANALYSIS {
+    // There could be overlap between ranges; we must avoid visiting the same reference twice.
+ // Avoid the class field since we already fixed it up in FixupClassVisitor.
+ if (offset.Uint32Value() != mirror::Object::ClassOffset().Uint32Value()) {
+ // Space is not yet added to the heap, don't do a read barrier.
+ mirror::Object* ref = obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(
+ offset);
+ // Use SetFieldObjectWithoutWriteBarrier to avoid card marking since we are writing to the
+ // image.
+ obj->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>(offset, ForwardObject(ref));
+ }
+ }
+
+ // java.lang.ref.Reference visitor.
+ void operator()(mirror::Class* klass ATTRIBUTE_UNUSED, mirror::Reference* ref) const
+ SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
+ mirror::Object* obj = ref->GetReferent<kWithoutReadBarrier>();
+ ref->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>(
+ mirror::Reference::ReferentOffset(),
+ ForwardObject(obj));
+ }
+
+ ALWAYS_INLINE void operator()(mirror::Object* obj) const NO_THREAD_SAFETY_ANALYSIS {
+ obj->VisitReferences</*visit native roots*/false, kVerifyNone, kWithoutReadBarrier>(
+ *this,
+ *this);
+ // We want to use our own class loader and not the one in the image.
+ if (obj->IsClass<kVerifyNone, kWithoutReadBarrier>()) {
+ mirror::Class* klass = obj->AsClass<kVerifyNone, kWithoutReadBarrier>();
+ FixupObjectAdapter visitor(boot_image_, boot_oat_, app_image_, app_oat_);
+ klass->FixupNativePointers(klass, sizeof(void*), visitor);
+ // Deal with the arrays.
+ mirror::PointerArray* vtable = klass->GetVTable<kVerifyNone, kWithoutReadBarrier>();
+ if (vtable != nullptr) {
+ vtable->Fixup(vtable, sizeof(void*), visitor);
+ }
+ mirror::IfTable* iftable = klass->GetIfTable<kVerifyNone, kWithoutReadBarrier>();
+ if (iftable != nullptr) {
+ for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
+ if (iftable->GetMethodArrayCount(i) > 0) {
+ mirror::PointerArray* methods =
+ iftable->GetMethodArray<kVerifyNone, kWithoutReadBarrier>(i);
+ DCHECK(methods != nullptr);
+ methods->Fixup(methods, sizeof(void*), visitor);
+ }
+ }
+ }
+ }
+ }
+};
+
+class ForwardObjectAdapter {
+ public:
+  ALWAYS_INLINE explicit ForwardObjectAdapter(const FixupVisitor* visitor) : visitor_(visitor) {}
+
+ template <typename T>
+ ALWAYS_INLINE T* operator()(T* src) const {
+ return visitor_->ForwardObject(src);
+ }
+
+ private:
+ const FixupVisitor* const visitor_;
+};
+
+class ForwardCodeAdapter {
+ public:
+  ALWAYS_INLINE explicit ForwardCodeAdapter(const FixupVisitor* visitor) : visitor_(visitor) {}
+
+ template <typename T>
+ ALWAYS_INLINE T* operator()(T* src) const {
+ return visitor_->ForwardCode(src);
+ }
+
+ private:
+ const FixupVisitor* const visitor_;
+};
+
+class FixupArtMethodVisitor : public FixupVisitor, public ArtMethodVisitor {
+ public:
+ template<typename... Args>
+ explicit FixupArtMethodVisitor(bool fixup_heap_objects, Args... args)
+ : FixupVisitor(args...),
+ fixup_heap_objects_(fixup_heap_objects) {}
+
+ virtual void Visit(ArtMethod* method) NO_THREAD_SAFETY_ANALYSIS {
+ if (fixup_heap_objects_) {
+ method->UpdateObjectsForImageRelocation(ForwardObjectAdapter(this));
+ }
+ method->UpdateEntrypoints(ForwardCodeAdapter(this));
+ }
+
+ private:
+ const bool fixup_heap_objects_;
+};
+
+class FixupArtFieldVisitor : public FixupVisitor, public ArtFieldVisitor {
+ public:
+ template<typename... Args>
+ explicit FixupArtFieldVisitor(Args... args) : FixupVisitor(args...) {}
+
+ virtual void Visit(ArtField* field) NO_THREAD_SAFETY_ANALYSIS {
+ field->UpdateObjects(ForwardObjectAdapter(this));
+ }
+};
+
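+
The ForwardObjectAdapter/ForwardCodeAdapter wrappers let the templated ArtMethod and ArtField update helpers take a plain callable, and every setter is guarded by an old != new check so pages whose pointers do not move stay clean. A minimal sketch of that pattern, with made-up names (Method, AddDelta) standing in for the ART types:

#include <cstddef>
#include <cstdio>

// Toy "native object" with a code pointer that may need forwarding.
struct Method {
  const void* code;

  // Same shape as ArtMethod::UpdateEntrypoints above: only store when the value
  // actually changes, so untouched image pages are not dirtied.
  template <typename Visitor>
  void UpdateEntrypoints(const Visitor& visitor) {
    const void* new_code = visitor(code);
    if (new_code != code) {
      code = new_code;
    }
  }
};

// Stand-in for ForwardCodeAdapter: a callable that forwards a pointer by a delta.
struct AddDelta {
  std::ptrdiff_t delta;
  const void* operator()(const void* src) const {
    return static_cast<const char*>(src) + delta;
  }
};

int main() {
  char fake_oat[16] = {};
  Method m{fake_oat};
  m.UpdateEntrypoints(AddDelta{4});  // Entry point moved by 4 bytes.
  m.UpdateEntrypoints(AddDelta{0});  // Unchanged: no write happens.
  std::printf("entrypoint offset: %td\n",
              static_cast<const char*>(m.code) - fake_oat);
  return 0;
}
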
+// Relocate an image space mapped at target_base that possibly used to be at a different base
+// address. "In place" means a single ImageSpace is modified in place rather than relocating from
+// one ImageSpace to another, so only one image space is needed, not one each for source and
+// destination.
+static bool RelocateInPlace(ImageHeader& image_header,
+ uint8_t* target_base,
+ accounting::ContinuousSpaceBitmap* bitmap,
+ const OatFile* app_oat_file,
+ std::string* error_msg) {
+ DCHECK(error_msg != nullptr);
+ if (!image_header.IsPic()) {
+ if (image_header.GetImageBegin() == target_base) {
+ return true;
+ }
+ *error_msg = StringPrintf("Cannot relocate non-pic image for oat file %s",
+ (app_oat_file != nullptr) ? app_oat_file->GetLocation().c_str() : "");
+ return false;
+ }
+ // Set up sections.
+ uint32_t boot_image_begin = 0;
+ uint32_t boot_image_end = 0;
+ uint32_t boot_oat_begin = 0;
+ uint32_t boot_oat_end = 0;
+ gc::Heap* const heap = Runtime::Current()->GetHeap();
+ heap->GetBootImagesSize(&boot_image_begin, &boot_image_end, &boot_oat_begin, &boot_oat_end);
+  CHECK_NE(boot_image_begin, boot_image_end)
+      << "Cannot relocate app image without boot image space";
+  CHECK_NE(boot_oat_begin, boot_oat_end) << "Cannot relocate app image without boot oat file";
+ const uint32_t boot_image_size = boot_image_end - boot_image_begin;
+ const uint32_t boot_oat_size = boot_oat_end - boot_oat_begin;
+ const uint32_t image_header_boot_image_size = image_header.GetBootImageSize();
+ const uint32_t image_header_boot_oat_size = image_header.GetBootOatSize();
+ if (boot_image_size != image_header_boot_image_size) {
+ *error_msg = StringPrintf("Boot image size %" PRIu64 " does not match expected size %"
+ PRIu64,
+ static_cast<uint64_t>(boot_image_size),
+ static_cast<uint64_t>(image_header_boot_image_size));
+ return false;
+ }
+ if (boot_oat_size != image_header_boot_oat_size) {
+ *error_msg = StringPrintf("Boot oat size %" PRIu64 " does not match expected size %"
+ PRIu64,
+ static_cast<uint64_t>(boot_oat_size),
+ static_cast<uint64_t>(image_header_boot_oat_size));
+ return false;
+ }
+ TimingLogger logger(__FUNCTION__, true, false);
+ RelocationRange boot_image(image_header.GetBootImageBegin(),
+ boot_image_begin,
+ boot_image_size);
+ RelocationRange boot_oat(image_header.GetBootOatBegin(),
+ boot_oat_begin,
+ boot_oat_size);
+ RelocationRange app_image(reinterpret_cast<uintptr_t>(image_header.GetImageBegin()),
+ reinterpret_cast<uintptr_t>(target_base),
+ image_header.GetImageSize());
+ // Use the oat data section since this is where the OatFile::Begin is.
+ RelocationRange app_oat(reinterpret_cast<uintptr_t>(image_header.GetOatDataBegin()),
+ // Not necessarily in low 4GB.
+ reinterpret_cast<uintptr_t>(app_oat_file->Begin()),
+ image_header.GetOatDataEnd() - image_header.GetOatDataBegin());
+ VLOG(image) << "App image " << app_image;
+ VLOG(image) << "App oat " << app_oat;
+ VLOG(image) << "Boot image " << boot_image;
+ VLOG(image) << "Boot oat " << boot_oat;
+ // True if we need to fixup any heap pointers, otherwise only code pointers.
+ const bool fixup_image = boot_image.Delta() != 0 || app_image.Delta() != 0;
+ const bool fixup_code = boot_oat.Delta() != 0 || app_oat.Delta() != 0;
+ if (!fixup_image && !fixup_code) {
+ // Nothing to fix up.
+ return true;
+ }
+ // Need to update the image to be at the target base.
+ const ImageSection& objects_section = image_header.GetImageSection(ImageHeader::kSectionObjects);
+ uintptr_t objects_begin = reinterpret_cast<uintptr_t>(target_base + objects_section.Offset());
+ uintptr_t objects_end = reinterpret_cast<uintptr_t>(target_base + objects_section.End());
+ // Two pass approach, fix up all classes first, then fix up non class-objects.
+ FixupObjectVisitor fixup_object_visitor(boot_image, boot_oat, app_image, app_oat);
+ if (fixup_image) {
+ TimingLogger::ScopedTiming timing("Fixup classes", &logger);
+    // Fixup class only touches app image classes; we don't need the mutator lock since the space
+    // is not yet visible to the GC.
+ FixupClassVisitor fixup_class_visitor(boot_image, boot_oat, app_image, app_oat);
+ bitmap->VisitMarkedRange(objects_begin, objects_end, fixup_class_visitor);
+    // Fixup objects may read fields in the boot image; use the mutator lock here for safety,
+    // though it's probably not required.
+ ScopedObjectAccess soa(Thread::Current());
+ timing.NewTiming("Fixup objects");
+ bitmap->VisitMarkedRange(objects_begin, objects_end, fixup_object_visitor);
+ FixupObjectAdapter fixup_adapter(boot_image, boot_oat, app_image, app_oat);
+ // Fixup image roots.
+ CHECK(app_image.ContainsSource(reinterpret_cast<uintptr_t>(image_header.GetImageRoots())));
+ image_header.RelocateImageObjects(app_image.Delta());
+ CHECK_EQ(image_header.GetImageBegin(), target_base);
+ // Fix up dex cache DexFile pointers.
+ auto* dex_caches = image_header.GetImageRoot(ImageHeader::kDexCaches)->
+ AsObjectArray<mirror::DexCache>();
+ for (int32_t i = 0, count = dex_caches->GetLength(); i < count; ++i) {
+ mirror::DexCache* dex_cache = dex_caches->Get(i);
+ // Fix up dex cache pointers.
+ GcRoot<mirror::String>* strings = dex_cache->GetStrings();
+ if (strings != nullptr) {
+ GcRoot<mirror::String>* new_strings = fixup_adapter.ForwardObject(strings);
+ if (strings != new_strings) {
+ dex_cache->SetFieldPtr64<false>(mirror::DexCache::StringsOffset(), new_strings);
+ }
+ dex_cache->FixupStrings(new_strings, fixup_adapter);
+ }
+ GcRoot<mirror::Class>* types = dex_cache->GetResolvedTypes();
+ if (types != nullptr) {
+ GcRoot<mirror::Class>* new_types = fixup_adapter.ForwardObject(types);
+ if (types != new_types) {
+ dex_cache->SetFieldPtr64<false>(mirror::DexCache::ResolvedTypesOffset(), new_types);
+ }
+ dex_cache->FixupResolvedTypes(new_types, fixup_adapter);
+ }
+ ArtMethod** methods = dex_cache->GetResolvedMethods();
+ if (methods != nullptr) {
+ ArtMethod** new_methods = fixup_adapter.ForwardObject(methods);
+ if (methods != new_methods) {
+ dex_cache->SetFieldPtr64<false>(mirror::DexCache::ResolvedMethodsOffset(), new_methods);
+ }
+ for (size_t j = 0, num = dex_cache->NumResolvedMethods(); j != num; ++j) {
+ ArtMethod* orig = mirror::DexCache::GetElementPtrSize(new_methods, j, sizeof(void*));
+ ArtMethod* copy = fixup_adapter.ForwardObject(orig);
+ if (orig != copy) {
+ mirror::DexCache::SetElementPtrSize(new_methods, j, copy, sizeof(void*));
+ }
+ }
+ }
+ ArtField** fields = dex_cache->GetResolvedFields();
+ if (fields != nullptr) {
+ ArtField** new_fields = fixup_adapter.ForwardObject(fields);
+ if (fields != new_fields) {
+ dex_cache->SetFieldPtr64<false>(mirror::DexCache::ResolvedFieldsOffset(), new_fields);
+ }
+ for (size_t j = 0, num = dex_cache->NumResolvedFields(); j != num; ++j) {
+ ArtField* orig = mirror::DexCache::GetElementPtrSize(new_fields, j, sizeof(void*));
+ ArtField* copy = fixup_adapter.ForwardObject(orig);
+ if (orig != copy) {
+ mirror::DexCache::SetElementPtrSize(new_fields, j, copy, sizeof(void*));
+ }
+ }
+ }
+ }
+ }
+ {
+    // Only touches objects in the app image; no need for the mutator lock.
+ TimingLogger::ScopedTiming timing("Fixup methods", &logger);
+ FixupArtMethodVisitor method_visitor(fixup_image, boot_image, boot_oat, app_image, app_oat);
+ image_header.GetImageSection(ImageHeader::kSectionArtMethods).VisitPackedArtMethods(
+ &method_visitor,
+ target_base,
+ sizeof(void*));
+ }
+ if (fixup_image) {
+ {
+      // Only touches objects in the app image; no need for the mutator lock.
+ TimingLogger::ScopedTiming timing("Fixup fields", &logger);
+ FixupArtFieldVisitor field_visitor(boot_image, boot_oat, app_image, app_oat);
+ image_header.GetImageSection(ImageHeader::kSectionArtFields).VisitPackedArtFields(
+ &field_visitor,
+ target_base);
+ }
+ // In the app image case, the image methods are actually in the boot image.
+ image_header.RelocateImageMethods(boot_image.Delta());
+ const auto& class_table_section = image_header.GetImageSection(ImageHeader::kSectionClassTable);
+ if (class_table_section.Size() > 0u) {
+      // Note that we require that ReadFromMemory does not make an internal copy of the elements.
+      // This also relies on VisitRoots not doing any verification that could fail after we update
+      // the roots to the image addresses.
+ ScopedObjectAccess soa(Thread::Current());
+ WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
+ ClassTable temp_table;
+ temp_table.ReadFromMemory(target_base + class_table_section.Offset());
+ FixupRootVisitor root_visitor(boot_image, boot_oat, app_image, app_oat);
+ temp_table.VisitRoots(root_visitor);
+ }
+ }
+ if (VLOG_IS_ON(image)) {
+ logger.Dump(LOG(INFO));
+ }
+ return true;
+}
+
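A toy sketch of the two-pass order used by RelocateInPlace above: class pointers are rewritten first (FixupClassVisitor), then the remaining reference fields (FixupObjectVisitor, which skips the class field). In the real code the second pass reads each object's already-forwarded class to find its reference fields; the toy below uses a fixed layout and made-up Obj/Forward names, so it only demonstrates the ordering and the delta arithmetic.

#include <cassert>
#include <cstdint>
#include <vector>

// Toy image object: a class pointer plus one reference field.
struct Obj {
  Obj* klass;
  Obj* field;
};

// Forward a pointer by delta if it lies inside the moved [begin, begin + size) range.
static Obj* Forward(Obj* ref, uintptr_t begin, uintptr_t size, intptr_t delta) {
  const uintptr_t addr = reinterpret_cast<uintptr_t>(ref);
  return (addr - begin < size) ? reinterpret_cast<Obj*>(addr + delta) : ref;
}

int main() {
  // Pretend the image was compiled to live 'delta' bytes below where it is mapped now.
  std::vector<Obj> image(2);
  const intptr_t delta = 64;  // Arbitrary illustrative shift.
  const uintptr_t old_begin = reinterpret_cast<uintptr_t>(image.data()) - delta;
  const uintptr_t size = image.size() * sizeof(Obj);
  // Stored pointers still use the old (pre-relocation) addresses.
  image[0].klass = reinterpret_cast<Obj*>(old_begin);
  image[0].field = reinterpret_cast<Obj*>(old_begin + sizeof(Obj));

  // Pass 1: fix up class pointers only (FixupClassVisitor).
  for (Obj& o : image) {
    o.klass = Forward(o.klass, old_begin, size, delta);
  }
  // Pass 2: fix up the remaining reference fields (FixupObjectVisitor skips the class field).
  for (Obj& o : image) {
    o.field = Forward(o.field, old_begin, size, delta);
  }
  assert(image[0].klass == &image[0]);
  assert(image[0].field == &image[1]);
  return 0;
}
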
+ImageSpace* ImageSpace::Init(const char* image_filename,
+ const char* image_location,
+ bool validate_oat_file,
+ const OatFile* oat_file,
+ std::string* error_msg) {
CHECK(image_filename != nullptr);
CHECK(image_location != nullptr);
- uint64_t start_time = 0;
- if (VLOG_IS_ON(heap) || VLOG_IS_ON(startup)) {
- start_time = NanoTime();
- LOG(INFO) << "ImageSpace::Init entering image_filename=" << image_filename;
- }
+ TimingLogger logger(__FUNCTION__, true, false);
+ VLOG(image) << "ImageSpace::Init entering image_filename=" << image_filename;
- std::unique_ptr<File> file(OS::OpenFileForReading(image_filename));
- if (file.get() == nullptr) {
- *error_msg = StringPrintf("Failed to open '%s'", image_filename);
- return nullptr;
+ std::unique_ptr<File> file;
+ {
+ TimingLogger::ScopedTiming timing("OpenImageFile", &logger);
+ file.reset(OS::OpenFileForReading(image_filename));
+ if (file == nullptr) {
+ *error_msg = StringPrintf("Failed to open '%s'", image_filename);
+ return nullptr;
+ }
}
- ImageHeader image_header;
- bool success = file->ReadFully(&image_header, sizeof(image_header));
- if (!success || !image_header.IsValid()) {
- *error_msg = StringPrintf("Invalid image header in '%s'", image_filename);
- return nullptr;
+ ImageHeader temp_image_header;
+ ImageHeader* image_header = &temp_image_header;
+ {
+ TimingLogger::ScopedTiming timing("ReadImageHeader", &logger);
+ bool success = file->ReadFully(image_header, sizeof(*image_header));
+ if (!success || !image_header->IsValid()) {
+ *error_msg = StringPrintf("Invalid image header in '%s'", image_filename);
+ return nullptr;
+ }
}
// Check that the file is larger or equal to the header size + data size.
const uint64_t image_file_size = static_cast<uint64_t>(file->GetLength());
- if (image_file_size < sizeof(ImageHeader) + image_header.GetDataSize()) {
+ if (image_file_size < sizeof(ImageHeader) + image_header->GetDataSize()) {
*error_msg = StringPrintf("Image file truncated: %" PRIu64 " vs. %" PRIu64 ".",
image_file_size,
- image_header.GetDataSize());
+ sizeof(ImageHeader) + image_header->GetDataSize());
return nullptr;
}
@@ -704,17 +1169,17 @@
LOG(INFO) << "Dumping image sections";
for (size_t i = 0; i < ImageHeader::kSectionCount; ++i) {
const auto section_idx = static_cast<ImageHeader::ImageSections>(i);
- auto& section = image_header.GetImageSection(section_idx);
+ auto& section = image_header->GetImageSection(section_idx);
LOG(INFO) << section_idx << " start="
- << reinterpret_cast<void*>(image_header.GetImageBegin() + section.Offset()) << " "
- << section;
+ << reinterpret_cast<void*>(image_header->GetImageBegin() + section.Offset()) << " "
+ << section;
}
}
- const auto& bitmap_section = image_header.GetImageSection(ImageHeader::kSectionImageBitmap);
+ const auto& bitmap_section = image_header->GetImageSection(ImageHeader::kSectionImageBitmap);
// The location we want to map from is the first aligned page after the end of the stored
// (possibly compressed) data.
- const size_t image_bitmap_offset = RoundUp(sizeof(image_header) + image_header.GetDataSize(),
+ const size_t image_bitmap_offset = RoundUp(sizeof(ImageHeader) + image_header->GetDataSize(),
kPageSize);
const size_t end_of_bitmap = image_bitmap_offset + bitmap_section.Size();
if (end_of_bitmap != image_file_size) {
@@ -724,67 +1189,84 @@
return nullptr;
}
+  // The preferred address to map the image at; null means any address. If we manage to map the
+  // image at its preferred begin address, the amount of fixup work required is minimized.
+ std::vector<uint8_t*> addresses(1, image_header->GetImageBegin());
+ if (image_header->IsPic()) {
+ // Can also map at a random low_4gb address since we can relocate in-place.
+ addresses.push_back(nullptr);
+ }
+
// Note: The image header is part of the image due to mmap page alignment required of offset.
std::unique_ptr<MemMap> map;
- if (image_header.GetStorageMode() == ImageHeader::kStorageModeUncompressed) {
- map.reset(MemMap::MapFileAtAddress(image_header.GetImageBegin(),
- image_header.GetImageSize(),
- PROT_READ | PROT_WRITE,
- MAP_PRIVATE,
- file->Fd(),
- 0,
- /*low_4gb*/false,
- /*reuse*/false,
- image_filename,
- error_msg));
- } else {
- // Reserve output and decompress into it.
- map.reset(MemMap::MapAnonymous(image_location,
- image_header.GetImageBegin(),
- image_header.GetImageSize(),
- PROT_READ | PROT_WRITE,
- /*low_4gb*/false,
- /*reuse*/false,
- error_msg));
+ std::string temp_error_msg;
+ for (uint8_t* address : addresses) {
+ TimingLogger::ScopedTiming timing("MapImageFile", &logger);
+ // Only care about the error message for the last address in addresses. We want to avoid the
+ // overhead of printing the process maps if we can relocate.
+ std::string* out_error_msg = (address == addresses.back()) ? &temp_error_msg : nullptr;
+ if (image_header->GetStorageMode() == ImageHeader::kStorageModeUncompressed) {
+ map.reset(MemMap::MapFileAtAddress(address,
+ image_header->GetImageSize(),
+ PROT_READ | PROT_WRITE,
+ MAP_PRIVATE,
+ file->Fd(),
+ 0,
+ /*low_4gb*/true,
+ /*reuse*/false,
+ image_filename,
+ /*out*/out_error_msg));
+ } else {
+ // Reserve output and decompress into it.
+ map.reset(MemMap::MapAnonymous(image_location,
+ address,
+ image_header->GetImageSize(),
+ PROT_READ | PROT_WRITE,
+ /*low_4gb*/true,
+ /*reuse*/false,
+ out_error_msg));
+ if (map != nullptr) {
+ const size_t stored_size = image_header->GetDataSize();
+ const size_t write_offset = sizeof(ImageHeader); // Skip the header.
+ std::unique_ptr<MemMap> temp_map(MemMap::MapFile(sizeof(ImageHeader) + stored_size,
+ PROT_READ,
+ MAP_PRIVATE,
+ file->Fd(),
+ /*offset*/0,
+ /*low_4gb*/false,
+ image_filename,
+ out_error_msg));
+ if (temp_map == nullptr) {
+ DCHECK(!out_error_msg->empty());
+ return nullptr;
+ }
+ memcpy(map->Begin(), image_header, sizeof(ImageHeader));
+ const uint64_t start = NanoTime();
+ const size_t decompressed_size = LZ4_decompress_safe(
+ reinterpret_cast<char*>(temp_map->Begin()) + sizeof(ImageHeader),
+ reinterpret_cast<char*>(map->Begin()) + write_offset,
+ stored_size,
+ map->Size());
+ VLOG(image) << "Decompressing image took " << PrettyDuration(NanoTime() - start);
+ if (decompressed_size + sizeof(ImageHeader) != image_header->GetImageSize()) {
+ *error_msg = StringPrintf("Decompressed size does not match expected image size %zu vs %zu",
+ decompressed_size + sizeof(ImageHeader),
+ image_header->GetImageSize());
+ return nullptr;
+ }
+ }
+ }
if (map != nullptr) {
- const size_t stored_size = image_header.GetDataSize();
- const size_t write_offset = sizeof(image_header); // Skip the header.
- std::unique_ptr<MemMap> temp_map(MemMap::MapFile(sizeof(ImageHeader) + stored_size,
- PROT_READ,
- MAP_PRIVATE,
- file->Fd(),
- /*offset*/0,
- /*low_4gb*/false,
- image_filename,
- error_msg));
- if (temp_map == nullptr) {
- DCHECK(!error_msg->empty());
- return nullptr;
- }
- memcpy(map->Begin(), &image_header, sizeof(image_header));
- const uint64_t start = NanoTime();
- const size_t decompressed_size = LZ4_decompress_safe(
- reinterpret_cast<char*>(temp_map->Begin()) + sizeof(ImageHeader),
- reinterpret_cast<char*>(map->Begin()) + write_offset,
- stored_size,
- map->Size());
- // TODO: VLOG(image)
- VLOG(class_linker) << "Decompressing image took " << PrettyDuration(NanoTime() - start);
- if (decompressed_size + sizeof(ImageHeader) != image_header.GetImageSize()) {
- *error_msg = StringPrintf("Decompressed size does not match expected image size %zu vs %zu",
- decompressed_size + sizeof(ImageHeader),
- image_header.GetImageSize());
- return nullptr;
- }
+ break;
}
}
if (map == nullptr) {
- DCHECK(!error_msg->empty());
+ DCHECK(!temp_error_msg.empty());
+ *error_msg = temp_error_msg;
return nullptr;
}
- CHECK_EQ(image_header.GetImageBegin(), map->Begin());
- DCHECK_EQ(0, memcmp(&image_header, map->Begin(), sizeof(ImageHeader)));
+ DCHECK_EQ(0, memcmp(image_header, map->Begin(), sizeof(ImageHeader)));
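For reference, a self-contained sketch of the LZ4 calls used by the compressed-image branch above: compress a buffer with LZ4_compress_default, then decompress it with LZ4_decompress_safe and check that the decompressed size matches the expected uncompressed size. The payload and buffer sizes below are made up.

#include <lz4.h>

#include <cassert>
#include <cstdio>
#include <cstring>
#include <vector>

int main() {
  const char payload[] = "image data image data image data image data";
  const int src_size = static_cast<int>(sizeof(payload));

  // Compress into a worst-case sized buffer.
  std::vector<char> compressed(LZ4_compressBound(src_size));
  const int stored_size = LZ4_compress_default(
      payload, compressed.data(), src_size, static_cast<int>(compressed.size()));
  assert(stored_size > 0);

  // Decompress and verify the size, as the compressed-image path above does.
  std::vector<char> decompressed(src_size);
  const int decompressed_size = LZ4_decompress_safe(
      compressed.data(), decompressed.data(), stored_size, static_cast<int>(decompressed.size()));
  assert(decompressed_size == src_size);
  assert(std::memcmp(payload, decompressed.data(), src_size) == 0);
  std::printf("stored %d bytes, decompressed %d bytes\n", stored_size, decompressed_size);
  return 0;
}
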
std::unique_ptr<MemMap> image_bitmap_map(MemMap::MapFileAtAddress(nullptr,
bitmap_section.Size(),
@@ -799,25 +1281,42 @@
*error_msg = StringPrintf("Failed to map image bitmap: %s", error_msg->c_str());
return nullptr;
}
- uint32_t bitmap_index = bitmap_index_.FetchAndAddSequentiallyConsistent(1);
- std::string bitmap_name(StringPrintf("imagespace %s live-bitmap %u", image_filename,
+  // The map is loaded; from now on use the image header in the mapping, since RelocateInPlace
+  // may patch it.
+ image_header = reinterpret_cast<ImageHeader*>(map->Begin());
+ const uint32_t bitmap_index = bitmap_index_.FetchAndAddSequentiallyConsistent(1);
+ std::string bitmap_name(StringPrintf("imagespace %s live-bitmap %u",
+ image_filename,
bitmap_index));
// Bitmap only needs to cover until the end of the mirror objects section.
- const ImageSection& image_objects = image_header.GetImageSection(ImageHeader::kSectionObjects);
- std::unique_ptr<accounting::ContinuousSpaceBitmap> bitmap(
+ const ImageSection& image_objects = image_header->GetImageSection(ImageHeader::kSectionObjects);
+  // We only want the mirror objects, not the ArtFields and ArtMethods.
+ uint8_t* const image_end = map->Begin() + image_objects.End();
+ std::unique_ptr<accounting::ContinuousSpaceBitmap> bitmap;
+ {
+ TimingLogger::ScopedTiming timing("CreateImageBitmap", &logger);
+ bitmap.reset(
accounting::ContinuousSpaceBitmap::CreateFromMemMap(
bitmap_name,
image_bitmap_map.release(),
reinterpret_cast<uint8_t*>(map->Begin()),
image_objects.End()));
- if (bitmap == nullptr) {
- *error_msg = StringPrintf("Could not create bitmap '%s'", bitmap_name.c_str());
- return nullptr;
+ if (bitmap == nullptr) {
+ *error_msg = StringPrintf("Could not create bitmap '%s'", bitmap_name.c_str());
+ return nullptr;
+ }
}
-
+ {
+ TimingLogger::ScopedTiming timing("RelocateImage", &logger);
+ if (!RelocateInPlace(*image_header,
+ map->Begin(),
+ bitmap.get(),
+ oat_file,
+ error_msg)) {
+ return nullptr;
+ }
+ }
// We only want the mirror object, not the ArtFields and ArtMethods.
- uint8_t* const image_end =
- map->Begin() + image_header.GetImageSection(ImageHeader::kSectionObjects).End();
std::unique_ptr<ImageSpace> space(new ImageSpace(image_filename,
image_location,
map.release(),
@@ -829,38 +1328,61 @@
// and ArtField::java_lang_reflect_ArtField_, which are used from
// Object::SizeOf() which VerifyImageAllocations() calls, are not
// set yet at this point.
-
- space->oat_file_.reset(space->OpenOatFile(image_filename, error_msg));
- if (space->oat_file_.get() == nullptr) {
- DCHECK(!error_msg->empty());
- return nullptr;
+ if (oat_file == nullptr) {
+ TimingLogger::ScopedTiming timing("OpenOatFile", &logger);
+ space->oat_file_.reset(space->OpenOatFile(image_filename, error_msg));
+ if (space->oat_file_ == nullptr) {
+ DCHECK(!error_msg->empty());
+ return nullptr;
+ }
+ space->oat_file_non_owned_ = space->oat_file_.get();
+ } else {
+ space->oat_file_non_owned_ = oat_file;
}
- space->oat_file_non_owned_ = space->oat_file_.get();
- if (validate_oat_file && !space->ValidateOatFile(error_msg)) {
- DCHECK(!error_msg->empty());
- return nullptr;
+ if (validate_oat_file) {
+ TimingLogger::ScopedTiming timing("ValidateOatFile", &logger);
+ if (!space->ValidateOatFile(error_msg)) {
+ DCHECK(!error_msg->empty());
+ return nullptr;
+ }
}
Runtime* runtime = Runtime::Current();
- runtime->SetInstructionSet(space->oat_file_->GetOatHeader().GetInstructionSet());
- if (!runtime->HasResolutionMethod()) {
- runtime->SetResolutionMethod(image_header.GetImageMethod(ImageHeader::kResolutionMethod));
- runtime->SetImtConflictMethod(image_header.GetImageMethod(ImageHeader::kImtConflictMethod));
+ // If oat_file is null, then it is the boot image space. Use oat_file_non_owned_ from the space
+ // to set the runtime methods.
+ CHECK_EQ(oat_file != nullptr, image_header->IsAppImage());
+ if (image_header->IsAppImage()) {
+ CHECK_EQ(runtime->GetResolutionMethod(),
+ image_header->GetImageMethod(ImageHeader::kResolutionMethod));
+ CHECK_EQ(runtime->GetImtConflictMethod(),
+ image_header->GetImageMethod(ImageHeader::kImtConflictMethod));
+ CHECK_EQ(runtime->GetImtUnimplementedMethod(),
+ image_header->GetImageMethod(ImageHeader::kImtUnimplementedMethod));
+ CHECK_EQ(runtime->GetCalleeSaveMethod(Runtime::kSaveAll),
+ image_header->GetImageMethod(ImageHeader::kCalleeSaveMethod));
+ CHECK_EQ(runtime->GetCalleeSaveMethod(Runtime::kRefsOnly),
+ image_header->GetImageMethod(ImageHeader::kRefsOnlySaveMethod));
+ CHECK_EQ(runtime->GetCalleeSaveMethod(Runtime::kRefsAndArgs),
+ image_header->GetImageMethod(ImageHeader::kRefsAndArgsSaveMethod));
+ } else if (!runtime->HasResolutionMethod()) {
+ runtime->SetInstructionSet(space->oat_file_non_owned_->GetOatHeader().GetInstructionSet());
+ runtime->SetResolutionMethod(image_header->GetImageMethod(ImageHeader::kResolutionMethod));
+ runtime->SetImtConflictMethod(image_header->GetImageMethod(ImageHeader::kImtConflictMethod));
runtime->SetImtUnimplementedMethod(
- image_header.GetImageMethod(ImageHeader::kImtUnimplementedMethod));
+ image_header->GetImageMethod(ImageHeader::kImtUnimplementedMethod));
runtime->SetCalleeSaveMethod(
- image_header.GetImageMethod(ImageHeader::kCalleeSaveMethod), Runtime::kSaveAll);
+ image_header->GetImageMethod(ImageHeader::kCalleeSaveMethod), Runtime::kSaveAll);
runtime->SetCalleeSaveMethod(
- image_header.GetImageMethod(ImageHeader::kRefsOnlySaveMethod), Runtime::kRefsOnly);
+ image_header->GetImageMethod(ImageHeader::kRefsOnlySaveMethod), Runtime::kRefsOnly);
runtime->SetCalleeSaveMethod(
- image_header.GetImageMethod(ImageHeader::kRefsAndArgsSaveMethod), Runtime::kRefsAndArgs);
+ image_header->GetImageMethod(ImageHeader::kRefsAndArgsSaveMethod), Runtime::kRefsAndArgs);
}
- if (VLOG_IS_ON(heap) || VLOG_IS_ON(startup)) {
- LOG(INFO) << "ImageSpace::Init exiting (" << PrettyDuration(NanoTime() - start_time)
- << ") " << *space.get();
+ VLOG(image) << "ImageSpace::Init exiting " << *space.get();
+ if (VLOG_IS_ON(image)) {
+ logger.Dump(LOG(INFO));
}
return space.release();
}
@@ -1002,6 +1524,16 @@
}
}
+ImageSpace* ImageSpace::CreateFromAppImage(const char* image,
+ const OatFile* oat_file,
+ std::string* error_msg) {
+ return gc::space::ImageSpace::Init(image,
+ image,
+ /*validate_oat_file*/false,
+ oat_file,
+ /*out*/error_msg);
+}
+
} // namespace space
} // namespace gc
} // namespace art
diff --git a/runtime/gc/space/image_space.h b/runtime/gc/space/image_space.h
index 9c8e8b2..f2f4163 100644
--- a/runtime/gc/space/image_space.h
+++ b/runtime/gc/space/image_space.h
@@ -35,7 +35,7 @@
return kSpaceTypeImageSpace;
}
- // Create a Space from an image file for a specified instruction
+ // Create a boot image space from an image file for a specified instruction
// set. Cannot be used for future allocation or collected.
//
// Create also opens the OatFile associated with the image file so
@@ -43,10 +43,16 @@
// creation of the alloc space. The ReleaseOatFile will later be
// used to transfer ownership of the OatFile to the ClassLinker when
// it is initialized.
- static ImageSpace* Create(const char* image,
- InstructionSet image_isa,
- bool secondary_image,
- std::string* error_msg)
+ static ImageSpace* CreateBootImage(const char* image,
+ InstructionSet image_isa,
+ bool secondary_image,
+ std::string* error_msg)
+ SHARED_REQUIRES(Locks::mutator_lock_);
+
+ // Try to open an existing app image space.
+ static ImageSpace* CreateFromAppImage(const char* image,
+ const OatFile* oat_file,
+ std::string* error_msg)
SHARED_REQUIRES(Locks::mutator_lock_);
// Reads the image header from the specified image location for the
@@ -144,15 +150,17 @@
}
protected:
- // Tries to initialize an ImageSpace from the given image path,
- // returning null on error.
+ // Tries to initialize an ImageSpace from the given image path, returning null on error.
//
- // If validate_oat_file is false (for /system), do not verify that
- // image's OatFile is up-to-date relative to its DexFile
- // inputs. Otherwise (for /data), validate the inputs and generate
- // the OatFile in /data/dalvik-cache if necessary.
- static ImageSpace* Init(const char* image_filename, const char* image_location,
- bool validate_oat_file, std::string* error_msg)
+ // If validate_oat_file is false (for /system), do not verify that image's OatFile is up-to-date
+ // relative to its DexFile inputs. Otherwise (for /data), validate the inputs and generate the
+ // OatFile in /data/dalvik-cache if necessary. If the oat_file is null, it uses the oat file from
+ // the image.
+ static ImageSpace* Init(const char* image_filename,
+ const char* image_location,
+ bool validate_oat_file,
+ const OatFile* oat_file,
+ std::string* error_msg)
SHARED_REQUIRES(Locks::mutator_lock_);
OatFile* OpenOatFile(const char* image, std::string* error_msg) const
diff --git a/runtime/image.cc b/runtime/image.cc
index 4254d94..2fed4d3 100644
--- a/runtime/image.cc
+++ b/runtime/image.cc
@@ -35,8 +35,13 @@
uint32_t oat_data_begin,
uint32_t oat_data_end,
uint32_t oat_file_end,
+ uint32_t boot_image_begin,
+ uint32_t boot_image_size,
+ uint32_t boot_oat_begin,
+ uint32_t boot_oat_size,
uint32_t pointer_size,
bool compile_pic,
+ bool is_pic,
StorageMode storage_mode,
size_t data_size)
: image_begin_(image_begin),
@@ -46,10 +51,15 @@
oat_data_begin_(oat_data_begin),
oat_data_end_(oat_data_end),
oat_file_end_(oat_file_end),
+ boot_image_begin_(boot_image_begin),
+ boot_image_size_(boot_image_size),
+ boot_oat_begin_(boot_oat_begin),
+ boot_oat_size_(boot_oat_size),
patch_delta_(0),
image_roots_(image_roots),
pointer_size_(pointer_size),
compile_pic_(compile_pic),
+ is_pic_(is_pic),
storage_mode_(storage_mode),
data_size_(data_size) {
CHECK_EQ(image_begin, RoundUp(image_begin, kPageSize));
@@ -67,13 +77,21 @@
void ImageHeader::RelocateImage(off_t delta) {
CHECK_ALIGNED(delta, kPageSize) << " patch delta must be page aligned";
- image_begin_ += delta;
oat_file_begin_ += delta;
oat_data_begin_ += delta;
oat_data_end_ += delta;
oat_file_end_ += delta;
- image_roots_ += delta;
patch_delta_ += delta;
+ RelocateImageObjects(delta);
+ RelocateImageMethods(delta);
+}
+
+void ImageHeader::RelocateImageObjects(off_t delta) {
+ image_begin_ += delta;
+ image_roots_ += delta;
+}
+
+void ImageHeader::RelocateImageMethods(off_t delta) {
for (size_t i = 0; i < kImageMethodsCount; ++i) {
image_methods_[i] += delta;
}
diff --git a/runtime/image.h b/runtime/image.h
index 7418f66..b3f177b 100644
--- a/runtime/image.h
+++ b/runtime/image.h
@@ -93,10 +93,15 @@
oat_data_begin_(0U),
oat_data_end_(0U),
oat_file_end_(0U),
+ boot_image_begin_(0U),
+ boot_image_size_(0U),
+ boot_oat_begin_(0U),
+ boot_oat_size_(0U),
patch_delta_(0),
image_roots_(0U),
pointer_size_(0U),
compile_pic_(0),
+ is_pic_(0),
storage_mode_(kDefaultStorageMode),
data_size_(0) {}
@@ -109,8 +114,13 @@
uint32_t oat_data_begin,
uint32_t oat_data_end,
uint32_t oat_file_end,
+ uint32_t boot_image_begin,
+ uint32_t boot_image_size,
+ uint32_t boot_oat_begin,
+ uint32_t boot_oat_size,
uint32_t pointer_size,
bool compile_pic,
+ bool is_pic,
StorageMode storage_mode,
size_t data_size);
@@ -208,11 +218,33 @@
SHARED_REQUIRES(Locks::mutator_lock_);
void RelocateImage(off_t delta);
+ void RelocateImageMethods(off_t delta);
+ void RelocateImageObjects(off_t delta);
bool CompilePic() const {
return compile_pic_ != 0;
}
+ bool IsPic() const {
+ return is_pic_ != 0;
+ }
+
+ uint32_t GetBootImageBegin() const {
+ return boot_image_begin_;
+ }
+
+ uint32_t GetBootImageSize() const {
+ return boot_image_size_;
+ }
+
+ uint32_t GetBootOatBegin() const {
+ return boot_oat_begin_;
+ }
+
+ uint32_t GetBootOatSize() const {
+ return boot_oat_size_;
+ }
+
StorageMode GetStorageMode() const {
return storage_mode_;
}
@@ -221,6 +253,12 @@
return data_size_;
}
+ bool IsAppImage() const {
+    // App images currently require a boot image; if the boot image size is non-zero then this is
+    // an app image header.
+ return boot_image_size_ != 0u;
+ }
+
private:
static const uint8_t kImageMagic[4];
static const uint8_t kImageVersion[4];
@@ -250,6 +288,16 @@
// .so files. Used for positioning a following alloc spaces.
uint32_t oat_file_end_;
+  // Boot image begin and size (app image headers only).
+ uint32_t boot_image_begin_;
+ uint32_t boot_image_size_;
+
+  // Boot oat begin and size (app image headers only).
+ uint32_t boot_oat_begin_;
+ uint32_t boot_oat_size_;
+
+ // TODO: We should probably insert a boot image checksum for app images.
+
// The total delta that this image has been patched.
int32_t patch_delta_;
@@ -262,10 +310,15 @@
// Boolean (0 or 1) to denote if the image was compiled with --compile-pic option
const uint32_t compile_pic_;
+  // Boolean (0 or 1) to denote if the image can be mapped at a random address; this only refers
+  // to the .art file. Currently, app oat files do not depend on their app image: there are no
+  // pointers from the app oat code to the app image.
+ const uint32_t is_pic_;
+
// Image section sizes/offsets correspond to the uncompressed form.
ImageSection sections_[kSectionCount];
- // Image methods.
+  // Image methods; for app images these may be inside the boot image.
uint64_t image_methods_[kImageMethodsCount];
// Storage method for the image, the image may be compressed.
diff --git a/runtime/intern_table.cc b/runtime/intern_table.cc
index 015bf98..96854da 100644
--- a/runtime/intern_table.cc
+++ b/runtime/intern_table.cc
@@ -86,14 +86,24 @@
// Note: we deliberately don't visit the weak_interns_ table and the immutable image roots.
}
-mirror::String* InternTable::LookupStrong(mirror::String* s) {
- return strong_interns_.Find(s);
+mirror::String* InternTable::LookupWeak(Thread* self, mirror::String* s) {
+ MutexLock mu(self, *Locks::intern_table_lock_);
+ return LookupWeakLocked(s);
}
-mirror::String* InternTable::LookupWeak(mirror::String* s) {
+mirror::String* InternTable::LookupStrong(Thread* self, mirror::String* s) {
+ MutexLock mu(self, *Locks::intern_table_lock_);
+ return LookupStrongLocked(s);
+}
+
+mirror::String* InternTable::LookupWeakLocked(mirror::String* s) {
return weak_interns_.Find(s);
}
+mirror::String* InternTable::LookupStrongLocked(mirror::String* s) {
+ return strong_interns_.Find(s);
+}
+
void InternTable::AddNewTable() {
MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
weak_interns_.AddNewTable();
@@ -169,7 +179,7 @@
for (size_t j = 0; j < num_strings; ++j) {
mirror::String* image_string = dex_cache->GetResolvedString(j);
if (image_string != nullptr) {
- mirror::String* found = LookupStrong(image_string);
+ mirror::String* found = LookupStrongLocked(image_string);
if (found == nullptr) {
InsertStrong(image_string);
} else {
@@ -250,7 +260,7 @@
}
}
// Check the strong table for a match.
- mirror::String* strong = LookupStrong(s);
+ mirror::String* strong = LookupStrongLocked(s);
if (strong != nullptr) {
return strong;
}
@@ -272,7 +282,7 @@
CHECK(self->GetWeakRefAccessEnabled());
}
// There is no match in the strong table, check the weak table.
- mirror::String* weak = LookupWeak(s);
+ mirror::String* weak = LookupWeakLocked(s);
if (weak != nullptr) {
if (is_strong) {
// A match was found in the weak table. Promote to the strong table.
@@ -317,8 +327,7 @@
}
bool InternTable::ContainsWeak(mirror::String* s) {
- MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
- return LookupWeak(s) == s;
+ return LookupWeak(Thread::Current(), s) == s;
}
void InternTable::SweepInternTableWeaks(IsMarkedVisitor* visitor) {
diff --git a/runtime/intern_table.h b/runtime/intern_table.h
index 2b2176e..274f5ad 100644
--- a/runtime/intern_table.h
+++ b/runtime/intern_table.h
@@ -84,10 +84,22 @@
bool ContainsWeak(mirror::String* s) SHARED_REQUIRES(Locks::mutator_lock_)
REQUIRES(!Locks::intern_table_lock_);
+  // Look up a strong intern; returns null if not found.
+ mirror::String* LookupStrong(Thread* self, mirror::String* s)
+ REQUIRES(!Locks::intern_table_lock_)
+ SHARED_REQUIRES(Locks::mutator_lock_);
+
+  // Look up a weak intern; returns null if not found.
+ mirror::String* LookupWeak(Thread* self, mirror::String* s)
+ REQUIRES(!Locks::intern_table_lock_)
+ SHARED_REQUIRES(Locks::mutator_lock_);
+
// Total number of interned strings.
size_t Size() const REQUIRES(!Locks::intern_table_lock_);
+
  // Total number of strongly live interned strings.
size_t StrongSize() const REQUIRES(!Locks::intern_table_lock_);
+
  // Total number of weakly live interned strings.
size_t WeakSize() const REQUIRES(!Locks::intern_table_lock_);
@@ -186,9 +198,9 @@
mirror::String* Insert(mirror::String* s, bool is_strong, bool holding_locks)
REQUIRES(!Locks::intern_table_lock_) SHARED_REQUIRES(Locks::mutator_lock_);
- mirror::String* LookupStrong(mirror::String* s)
+ mirror::String* LookupStrongLocked(mirror::String* s)
SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);
- mirror::String* LookupWeak(mirror::String* s)
+ mirror::String* LookupWeakLocked(mirror::String* s)
SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);
mirror::String* InsertStrong(mirror::String* s)
SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);
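
The LookupStrong/LookupWeak vs. LookupStrongLocked/LookupWeakLocked split follows the usual pattern where the public method acquires the lock and the private *Locked variant assumes it is already held, so callers such as Insert that already hold intern_table_lock_ do not re-acquire it. A generic sketch of the pattern with a stand-in table (not ART code):

#include <mutex>
#include <string>
#include <unordered_set>

class InternTableSketch {
 public:
  // Public lookup: takes the lock itself.
  bool LookupStrong(const std::string& s) {
    std::lock_guard<std::mutex> lock(lock_);
    return LookupStrongLocked(s);
  }

  void Insert(const std::string& s) {
    std::lock_guard<std::mutex> lock(lock_);
    // Already holding the lock, so use the *Locked variant directly.
    if (!LookupStrongLocked(s)) {
      strong_.insert(s);
    }
  }

 private:
  // Private lookup: the caller must already hold lock_.
  bool LookupStrongLocked(const std::string& s) const {
    return strong_.count(s) != 0;
  }

  std::mutex lock_;
  std::unordered_set<std::string> strong_;
};

int main() {
  InternTableSketch table;
  table.Insert("hello");
  return table.LookupStrong("hello") ? 0 : 1;
}
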
diff --git a/runtime/mirror/class-inl.h b/runtime/mirror/class-inl.h
index 53118e0..75a3f1a 100644
--- a/runtime/mirror/class-inl.h
+++ b/runtime/mirror/class-inl.h
@@ -47,11 +47,15 @@
return GetField32(ObjectSizeOffset());
}
+template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline Class* Class::GetSuperClass() {
// Can only get super class for loaded classes (hack for when runtime is
// initializing)
- DCHECK(IsLoaded() || IsErroneous() || !Runtime::Current()->IsStarted()) << IsLoaded();
- return GetFieldObject<Class>(OFFSET_OF_OBJECT_MEMBER(Class, super_class_));
+ DCHECK(IsLoaded<kVerifyFlags>() ||
+ IsErroneous<kVerifyFlags>() ||
+ !Runtime::Current()->IsStarted()) << IsLoaded();
+ return GetFieldObject<Class, kVerifyFlags, kReadBarrierOption>(
+ OFFSET_OF_OBJECT_MEMBER(Class, super_class_));
}
inline ClassLoader* Class::GetClassLoader() {
@@ -226,9 +230,12 @@
return &GetVirtualMethodsSliceUnchecked(pointer_size).At(i);
}
+template<VerifyObjectFlags kVerifyFlags,
+ ReadBarrierOption kReadBarrierOption>
inline PointerArray* Class::GetVTable() {
- DCHECK(IsResolved() || IsErroneous());
- return GetFieldObject<PointerArray>(OFFSET_OF_OBJECT_MEMBER(Class, vtable_));
+ DCHECK(IsResolved<kVerifyFlags>() || IsErroneous<kVerifyFlags>());
+ return GetFieldObject<PointerArray, kVerifyFlags, kReadBarrierOption>(
+ OFFSET_OF_OBJECT_MEMBER(Class, vtable_));
}
inline PointerArray* Class::GetVTableDuringLinking() {
@@ -499,8 +506,11 @@
return FindVirtualMethodForVirtual(method, pointer_size);
}
+template<VerifyObjectFlags kVerifyFlags,
+ ReadBarrierOption kReadBarrierOption>
inline IfTable* Class::GetIfTable() {
- return GetFieldObject<IfTable>(OFFSET_OF_OBJECT_MEMBER(Class, iftable_));
+ return GetFieldObject<IfTable, kVerifyFlags, kReadBarrierOption>(
+ OFFSET_OF_OBJECT_MEMBER(Class, iftable_));
}
inline int32_t Class::GetIfTableCount() {
@@ -516,7 +526,7 @@
}
inline LengthPrefixedArray<ArtField>* Class::GetIFieldsPtr() {
- DCHECK(IsLoaded() || IsErroneous());
+ DCHECK(IsLoaded() || IsErroneous()) << GetStatus();
return GetFieldPtr<LengthPrefixedArray<ArtField>*>(OFFSET_OF_OBJECT_MEMBER(Class, ifields_));
}
@@ -747,9 +757,12 @@
return size;
}
-template <typename Visitor>
+template <bool kVisitNativeRoots,
+ VerifyObjectFlags kVerifyFlags,
+ ReadBarrierOption kReadBarrierOption,
+ typename Visitor>
inline void Class::VisitReferences(mirror::Class* klass, const Visitor& visitor) {
- VisitInstanceFieldsReferences(klass, visitor);
+ VisitInstanceFieldsReferences<kVerifyFlags, kReadBarrierOption>(klass, visitor);
// Right after a class is allocated, but not yet loaded
// (kStatusNotReady, see ClassLinker::LoadClass()), GC may find it
// and scan it. IsTemp() may call Class::GetAccessFlags() but may
@@ -757,14 +770,16 @@
// status is kStatusNotReady. To avoid it, rely on IsResolved()
// only. This is fine because a temp class never goes into the
// kStatusResolved state.
- if (IsResolved()) {
+ if (IsResolved<kVerifyFlags>()) {
// Temp classes don't ever populate imt/vtable or static fields and they are not even
// allocated with the right size for those. Also, unresolved classes don't have fields
// linked yet.
- VisitStaticFieldsReferences(this, visitor);
+ VisitStaticFieldsReferences<kVerifyFlags, kReadBarrierOption>(this, visitor);
}
- // Since this class is reachable, we must also visit the associated roots when we scan it.
- VisitNativeRoots(visitor, Runtime::Current()->GetClassLinker()->GetImagePointerSize());
+ if (kVisitNativeRoots) {
+ // Since this class is reachable, we must also visit the associated roots when we scan it.
+ VisitNativeRoots(visitor, Runtime::Current()->GetClassLinker()->GetImagePointerSize());
+ }
}
template<ReadBarrierOption kReadBarrierOption>
diff --git a/runtime/mirror/class.h b/runtime/mirror/class.h
index 6b5ed91..3017820 100644
--- a/runtime/mirror/class.h
+++ b/runtime/mirror/class.h
@@ -494,10 +494,11 @@
(IsAbstract() && IsArrayClass());
}
- template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
bool IsObjectArrayClass() SHARED_REQUIRES(Locks::mutator_lock_) {
- return GetComponentType<kVerifyFlags>() != nullptr &&
- !GetComponentType<kVerifyFlags>()->IsPrimitive();
+ mirror::Class* const component_type = GetComponentType<kVerifyFlags, kReadBarrierOption>();
+ return component_type != nullptr && !component_type->IsPrimitive();
}
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
@@ -656,6 +657,8 @@
// to themselves. Classes for primitive types may not assign to each other.
ALWAYS_INLINE bool IsAssignableFrom(Class* src) SHARED_REQUIRES(Locks::mutator_lock_);
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
ALWAYS_INLINE Class* GetSuperClass() SHARED_REQUIRES(Locks::mutator_lock_);
// Get first common super class. It will never return null.
@@ -791,6 +794,8 @@
ArtMethod* GetVirtualMethodDuringLinking(size_t i, size_t pointer_size)
SHARED_REQUIRES(Locks::mutator_lock_);
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
ALWAYS_INLINE PointerArray* GetVTable() SHARED_REQUIRES(Locks::mutator_lock_);
ALWAYS_INLINE PointerArray* GetVTableDuringLinking() SHARED_REQUIRES(Locks::mutator_lock_);
@@ -941,6 +946,8 @@
ALWAYS_INLINE int32_t GetIfTableCount() SHARED_REQUIRES(Locks::mutator_lock_);
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
ALWAYS_INLINE IfTable* GetIfTable() SHARED_REQUIRES(Locks::mutator_lock_);
ALWAYS_INLINE void SetIfTable(IfTable* new_iftable) SHARED_REQUIRES(Locks::mutator_lock_);
@@ -1226,7 +1233,8 @@
// Fix up all of the native pointers in the class by running them through the visitor. Only sets
// the corresponding entry in dest if visitor(obj) != obj to prevent dirty memory. Dest should be
- // initialized to a copy of *this to prevent issues.
+ // initialized to a copy of *this to prevent issues. Does not visit the ArtMethod and ArtField
+ // roots.
template <typename Visitor>
void FixupNativePointers(mirror::Class* dest, size_t pointer_size, const Visitor& visitor)
SHARED_REQUIRES(Locks::mutator_lock_);
@@ -1277,7 +1285,10 @@
static MemberOffset EmbeddedImTableOffset(size_t pointer_size);
static MemberOffset EmbeddedVTableOffset(size_t pointer_size);
- template <typename Visitor>
+ template <bool kVisitNativeRoots,
+ VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
+ typename Visitor>
void VisitReferences(mirror::Class* klass, const Visitor& visitor)
SHARED_REQUIRES(Locks::mutator_lock_);
diff --git a/runtime/mirror/class_loader-inl.h b/runtime/mirror/class_loader-inl.h
index e22ddd7..84fa80f 100644
--- a/runtime/mirror/class_loader-inl.h
+++ b/runtime/mirror/class_loader-inl.h
@@ -25,15 +25,20 @@
namespace art {
namespace mirror {
-template <VerifyObjectFlags kVerifyFlags, typename Visitor>
+template <bool kVisitClasses,
+ VerifyObjectFlags kVerifyFlags,
+ ReadBarrierOption kReadBarrierOption,
+ typename Visitor>
inline void ClassLoader::VisitReferences(mirror::Class* klass, const Visitor& visitor) {
// Visit instance fields first.
- VisitInstanceFieldsReferences(klass, visitor);
- // Visit classes loaded after.
- ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
- ClassTable* const class_table = GetClassTable();
- if (class_table != nullptr) {
- class_table->VisitRoots(visitor);
+ VisitInstanceFieldsReferences<kVerifyFlags, kReadBarrierOption>(klass, visitor);
+ if (kVisitClasses) {
+ // Visit classes loaded after.
+ ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
+ ClassTable* const class_table = GetClassTable();
+ if (class_table != nullptr) {
+ class_table->VisitRoots(visitor);
+ }
}
}
diff --git a/runtime/mirror/class_loader.h b/runtime/mirror/class_loader.h
index c2a65d6..1957e13 100644
--- a/runtime/mirror/class_loader.h
+++ b/runtime/mirror/class_loader.h
@@ -63,7 +63,10 @@
private:
// Visit instance fields of the class loader as well as its associated classes.
// Null class loader is handled by ClassLinker::VisitClassRoots.
- template <VerifyObjectFlags kVerifyFlags, typename Visitor>
+ template <bool kVisitClasses,
+ VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
+ typename Visitor>
void VisitReferences(mirror::Class* klass, const Visitor& visitor)
SHARED_REQUIRES(Locks::mutator_lock_)
REQUIRES(!Locks::classlinker_classes_lock_);
diff --git a/runtime/mirror/dex_cache-inl.h b/runtime/mirror/dex_cache-inl.h
index 975af61..2ecc9fb 100644
--- a/runtime/mirror/dex_cache-inl.h
+++ b/runtime/mirror/dex_cache-inl.h
@@ -122,18 +122,23 @@
}
}
-template <VerifyObjectFlags kVerifyFlags, typename Visitor>
+template <bool kVisitNativeRoots,
+ VerifyObjectFlags kVerifyFlags,
+ ReadBarrierOption kReadBarrierOption,
+ typename Visitor>
inline void DexCache::VisitReferences(mirror::Class* klass, const Visitor& visitor) {
// Visit instance fields first.
- VisitInstanceFieldsReferences(klass, visitor);
+ VisitInstanceFieldsReferences<kVerifyFlags, kReadBarrierOption>(klass, visitor);
// Visit arrays after.
- GcRoot<mirror::String>* strings = GetStrings();
- for (size_t i = 0, num_strings = NumStrings(); i != num_strings; ++i) {
- visitor.VisitRootIfNonNull(strings[i].AddressWithoutBarrier());
- }
- GcRoot<mirror::Class>* resolved_types = GetResolvedTypes();
- for (size_t i = 0, num_types = NumResolvedTypes(); i != num_types; ++i) {
- visitor.VisitRootIfNonNull(resolved_types[i].AddressWithoutBarrier());
+ if (kVisitNativeRoots) {
+ GcRoot<mirror::String>* strings = GetStrings();
+ for (size_t i = 0, num_strings = NumStrings(); i != num_strings; ++i) {
+ visitor.VisitRootIfNonNull(strings[i].AddressWithoutBarrier());
+ }
+ GcRoot<mirror::Class>* resolved_types = GetResolvedTypes();
+ for (size_t i = 0, num_types = NumResolvedTypes(); i != num_types; ++i) {
+ visitor.VisitRootIfNonNull(resolved_types[i].AddressWithoutBarrier());
+ }
}
}
diff --git a/runtime/mirror/dex_cache.cc b/runtime/mirror/dex_cache.cc
index 349a319..7b058d0 100644
--- a/runtime/mirror/dex_cache.cc
+++ b/runtime/mirror/dex_cache.cc
@@ -51,10 +51,10 @@
SetDexFile(dex_file);
SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(DexCache, location_), location);
- SetField64<false>(StringsOffset(), reinterpret_cast<uintptr_t>(strings));
- SetField64<false>(ResolvedTypesOffset(), reinterpret_cast<uintptr_t>(resolved_types));
- SetField64<false>(ResolvedMethodsOffset(), reinterpret_cast<uintptr_t>(resolved_methods));
- SetField64<false>(ResolvedFieldsOffset(), reinterpret_cast<uintptr_t>(resolved_fields));
+ SetStrings(strings);
+ SetResolvedTypes(resolved_types);
+ SetResolvedMethods(resolved_methods);
+ SetResolvedFields(resolved_fields);
SetField32<false>(NumStringsOffset(), num_strings);
SetField32<false>(NumResolvedTypesOffset(), num_resolved_types);
SetField32<false>(NumResolvedMethodsOffset(), num_resolved_methods);
diff --git a/runtime/mirror/dex_cache.h b/runtime/mirror/dex_cache.h
index 32eb595..5ed061f 100644
--- a/runtime/mirror/dex_cache.h
+++ b/runtime/mirror/dex_cache.h
@@ -137,18 +137,40 @@
return GetFieldPtr<GcRoot<String>*>(StringsOffset());
}
+ void SetStrings(GcRoot<String>* strings) ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_) {
+ SetFieldPtr<false>(StringsOffset(), strings);
+ }
+
GcRoot<Class>* GetResolvedTypes() ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_) {
return GetFieldPtr<GcRoot<Class>*>(ResolvedTypesOffset());
}
+ void SetResolvedTypes(GcRoot<Class>* resolved_types)
+ ALWAYS_INLINE
+ SHARED_REQUIRES(Locks::mutator_lock_) {
+ SetFieldPtr<false>(ResolvedTypesOffset(), resolved_types);
+ }
+
ArtMethod** GetResolvedMethods() ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_) {
return GetFieldPtr<ArtMethod**>(ResolvedMethodsOffset());
}
+ void SetResolvedMethods(ArtMethod** resolved_methods)
+ ALWAYS_INLINE
+ SHARED_REQUIRES(Locks::mutator_lock_) {
+ SetFieldPtr<false>(ResolvedMethodsOffset(), resolved_methods);
+ }
+
ArtField** GetResolvedFields() ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_) {
return GetFieldPtr<ArtField**>(ResolvedFieldsOffset());
}
+ void SetResolvedFields(ArtField** resolved_fields)
+ ALWAYS_INLINE
+ SHARED_REQUIRES(Locks::mutator_lock_) {
+ SetFieldPtr<false>(ResolvedFieldsOffset(), resolved_fields);
+ }
+
size_t NumStrings() SHARED_REQUIRES(Locks::mutator_lock_) {
return GetField32(NumStringsOffset());
}
@@ -186,7 +208,10 @@
private:
// Visit instance fields of the dex cache as well as its associated arrays.
- template <VerifyObjectFlags kVerifyFlags, typename Visitor>
+ template <bool kVisitNativeRoots,
+ VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
+ typename Visitor>
void VisitReferences(mirror::Class* klass, const Visitor& visitor)
SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_);
diff --git a/runtime/mirror/iftable.h b/runtime/mirror/iftable.h
index b21ecdf..605deac 100644
--- a/runtime/mirror/iftable.h
+++ b/runtime/mirror/iftable.h
@@ -34,8 +34,11 @@
ALWAYS_INLINE void SetInterface(int32_t i, Class* interface)
SHARED_REQUIRES(Locks::mutator_lock_);
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
PointerArray* GetMethodArray(int32_t i) SHARED_REQUIRES(Locks::mutator_lock_) {
- auto* method_array = down_cast<PointerArray*>(Get((i * kMax) + kMethodArray));
+ auto* method_array = down_cast<PointerArray*>(Get<kVerifyFlags, kReadBarrierOption>(
+ (i * kMax) + kMethodArray));
DCHECK(method_array != nullptr);
return method_array;
}
diff --git a/runtime/mirror/object-inl.h b/runtime/mirror/object-inl.h
index 4603428..760de9a 100644
--- a/runtime/mirror/object-inl.h
+++ b/runtime/mirror/object-inl.h
@@ -1031,7 +1031,10 @@
return success;
}
-template<bool kIsStatic, typename Visitor>
+template<bool kIsStatic,
+ VerifyObjectFlags kVerifyFlags,
+ ReadBarrierOption kReadBarrierOption,
+ typename Visitor>
inline void Object::VisitFieldsReferences(uint32_t ref_offsets, const Visitor& visitor) {
if (!kIsStatic && (ref_offsets != mirror::Class::kClassWalkSuper)) {
// Instance fields and not the slow-path.
@@ -1047,9 +1050,12 @@
// There is no reference offset bitmap. In the non-static case, walk up the class
// inheritance hierarchy and find reference offsets the hard way. In the static case, just
// consider this class.
- for (mirror::Class* klass = kIsStatic ? AsClass() : GetClass(); klass != nullptr;
- klass = kIsStatic ? nullptr : klass->GetSuperClass()) {
- size_t num_reference_fields =
+ for (mirror::Class* klass = kIsStatic
+ ? AsClass<kVerifyFlags, kReadBarrierOption>()
+ : GetClass<kVerifyFlags, kReadBarrierOption>();
+ klass != nullptr;
+ klass = kIsStatic ? nullptr : klass->GetSuperClass<kVerifyFlags, kReadBarrierOption>()) {
+ const size_t num_reference_fields =
kIsStatic ? klass->NumReferenceStaticFields() : klass->NumReferenceInstanceFields();
if (num_reference_fields == 0u) {
continue;
@@ -1072,49 +1078,54 @@
}
}
-template<typename Visitor>
+template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption, typename Visitor>
inline void Object::VisitInstanceFieldsReferences(mirror::Class* klass, const Visitor& visitor) {
- VisitFieldsReferences<false>(klass->GetReferenceInstanceOffsets<kVerifyNone>(), visitor);
+ VisitFieldsReferences<false, kVerifyFlags, kReadBarrierOption>(
+ klass->GetReferenceInstanceOffsets<kVerifyFlags>(), visitor);
}
-template<typename Visitor>
+template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption, typename Visitor>
inline void Object::VisitStaticFieldsReferences(mirror::Class* klass, const Visitor& visitor) {
DCHECK(!klass->IsTemp());
- klass->VisitFieldsReferences<true>(0, visitor);
+ klass->VisitFieldsReferences<true, kVerifyFlags, kReadBarrierOption>(0, visitor);
}
-template<VerifyObjectFlags kVerifyFlags>
+template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline bool Object::IsClassLoader() {
- return GetClass<kVerifyFlags>()->IsClassLoaderClass();
+ return GetClass<kVerifyFlags, kReadBarrierOption>()->IsClassLoaderClass();
}
-template<VerifyObjectFlags kVerifyFlags>
+template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline mirror::ClassLoader* Object::AsClassLoader() {
- DCHECK(IsClassLoader<kVerifyFlags>());
+ DCHECK((IsClassLoader<kVerifyFlags, kReadBarrierOption>()));
return down_cast<mirror::ClassLoader*>(this);
}
-template<VerifyObjectFlags kVerifyFlags>
+template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline bool Object::IsDexCache() {
- return GetClass<kVerifyFlags>()->IsDexCacheClass();
+ return GetClass<kVerifyFlags, kReadBarrierOption>()->IsDexCacheClass();
}
-template<VerifyObjectFlags kVerifyFlags>
+template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline mirror::DexCache* Object::AsDexCache() {
- DCHECK(IsDexCache<kVerifyFlags>());
+ DCHECK((IsDexCache<kVerifyFlags, kReadBarrierOption>()));
return down_cast<mirror::DexCache*>(this);
}
-template <VerifyObjectFlags kVerifyFlags, typename Visitor, typename JavaLangRefVisitor>
+template <bool kVisitNativeRoots,
+ VerifyObjectFlags kVerifyFlags,
+ ReadBarrierOption kReadBarrierOption,
+ typename Visitor,
+ typename JavaLangRefVisitor>
inline void Object::VisitReferences(const Visitor& visitor,
const JavaLangRefVisitor& ref_visitor) {
- mirror::Class* klass = GetClass<kVerifyFlags>();
+ mirror::Class* klass = GetClass<kVerifyFlags, kReadBarrierOption>();
visitor(this, ClassOffset(), false);
const uint32_t class_flags = klass->GetClassFlags<kVerifyNone>();
if (LIKELY(class_flags == kClassFlagNormal)) {
DCHECK(!klass->IsVariableSize());
VisitInstanceFieldsReferences(klass, visitor);
- DCHECK(!klass->IsClassClass());
+ DCHECK((!klass->IsClassClass<kVerifyFlags, kReadBarrierOption>()));
DCHECK(!klass->IsStringClass());
DCHECK(!klass->IsClassLoaderClass());
DCHECK(!klass->IsArrayClass());
@@ -1123,23 +1134,29 @@
DCHECK(!klass->IsStringClass());
if (class_flags == kClassFlagClass) {
DCHECK(klass->IsClassClass());
- AsClass<kVerifyNone>()->VisitReferences(klass, visitor);
+ AsClass<kVerifyNone>()->VisitReferences<kVisitNativeRoots,
+ kVerifyFlags,
+ kReadBarrierOption>(klass, visitor);
} else if (class_flags == kClassFlagObjectArray) {
- DCHECK(klass->IsObjectArrayClass());
+ DCHECK((klass->IsObjectArrayClass<kVerifyFlags, kReadBarrierOption>()));
AsObjectArray<mirror::Object, kVerifyNone>()->VisitReferences(visitor);
} else if ((class_flags & kClassFlagReference) != 0) {
VisitInstanceFieldsReferences(klass, visitor);
ref_visitor(klass, AsReference());
} else if (class_flags == kClassFlagDexCache) {
- mirror::DexCache* const dex_cache = AsDexCache<kVerifyFlags>();
- dex_cache->VisitReferences<kVerifyFlags>(klass, visitor);
+ mirror::DexCache* const dex_cache = AsDexCache<kVerifyFlags, kReadBarrierOption>();
+ dex_cache->VisitReferences<kVisitNativeRoots,
+ kVerifyFlags,
+ kReadBarrierOption>(klass, visitor);
} else {
- mirror::ClassLoader* const class_loader = AsClassLoader<kVerifyFlags>();
- class_loader->VisitReferences<kVerifyFlags>(klass, visitor);
+ mirror::ClassLoader* const class_loader = AsClassLoader<kVerifyFlags, kReadBarrierOption>();
+ class_loader->VisitReferences<kVisitNativeRoots,
+ kVerifyFlags,
+ kReadBarrierOption>(klass, visitor);
}
} else if (kIsDebugBuild) {
- CHECK(!klass->IsClassClass());
- CHECK(!klass->IsObjectArrayClass());
+ CHECK((!klass->IsClassClass<kVerifyFlags, kReadBarrierOption>()));
+ CHECK((!klass->IsObjectArrayClass<kVerifyFlags, kReadBarrierOption>()));
// String still has instance fields for reflection purposes but these don't exist in
// actual string instances.
if (!klass->IsStringClass()) {
@@ -1147,7 +1164,7 @@
mirror::Class* super_class = klass;
do {
total_reference_instance_fields += super_class->NumReferenceInstanceFields();
- super_class = super_class->GetSuperClass();
+ super_class = super_class->GetSuperClass<kVerifyFlags, kReadBarrierOption>();
} while (super_class != nullptr);
// The only reference field should be the object's class. This field is handled at the
// beginning of the function.
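
For readers outside the ART tree, the mechanism this hunk threads through Object::VisitReferences can be shown in isolation: a non-type template parameter decides at compile time whether a read barrier runs on each reference load, so relocation code can walk objects barrier-free. The following self-contained miniature uses hypothetical names (FakeObject, Read, VisitClassReference); it shows only the dispatch shape, not ART code.

// Minimal, self-contained illustration of compile-time read-barrier selection.
// All names here (FakeObject, Read, VisitClassReference) are hypothetical.
#include <cstdio>

enum ReadBarrierOption { kWithReadBarrier, kWithoutReadBarrier };

struct FakeObject {
  FakeObject* klass = nullptr;  // Stands in for an object's class reference.
};

template <ReadBarrierOption kReadBarrierOption>
FakeObject* Read(FakeObject** slot) {
  if (kReadBarrierOption == kWithReadBarrier) {
    // A real runtime would run the read barrier here (mark or forward the referent).
    std::printf("read barrier applied\n");
  }
  return *slot;
}

template <ReadBarrierOption kReadBarrierOption, typename Visitor>
void VisitClassReference(FakeObject* obj, const Visitor& visitor) {
  visitor(Read<kReadBarrierOption>(&obj->klass));
}

int main() {
  FakeObject klass;
  FakeObject obj;
  obj.klass = &klass;
  auto print = [](FakeObject* ref) { std::printf("visited %p\n", static_cast<void*>(ref)); };
  VisitClassReference<kWithReadBarrier>(&obj, print);     // Normal, GC-visible path.
  VisitClassReference<kWithoutReadBarrier>(&obj, print);  // Image-relocation style path.
  return 0;
}
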
diff --git a/runtime/mirror/object.h b/runtime/mirror/object.h
index 71e704e..d635002 100644
--- a/runtime/mirror/object.h
+++ b/runtime/mirror/object.h
@@ -164,14 +164,18 @@
template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
ObjectArray<T>* AsObjectArray() SHARED_REQUIRES(Locks::mutator_lock_);
- template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
bool IsClassLoader() SHARED_REQUIRES(Locks::mutator_lock_);
- template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
ClassLoader* AsClassLoader() SHARED_REQUIRES(Locks::mutator_lock_);
- template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
bool IsDexCache() SHARED_REQUIRES(Locks::mutator_lock_);
- template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
DexCache* AsDexCache() SHARED_REQUIRES(Locks::mutator_lock_);
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
@@ -456,6 +460,13 @@
SetFieldPtrWithSize<kTransactionActive, kCheckTransaction, kVerifyFlags>(
field_offset, new_value, sizeof(void*));
}
+ template<bool kTransactionActive, bool kCheckTransaction = true,
+ VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, typename T>
+ void SetFieldPtr64(MemberOffset field_offset, T new_value)
+ SHARED_REQUIRES(Locks::mutator_lock_) {
+ SetFieldPtrWithSize<kTransactionActive, kCheckTransaction, kVerifyFlags>(
+ field_offset, new_value, 8u);
+ }
template<bool kTransactionActive, bool kCheckTransaction = true,
VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, typename T>
@@ -475,7 +486,9 @@
}
// TODO fix thread safety analysis broken by the use of template. This should be
// SHARED_REQUIRES(Locks::mutator_lock_).
- template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ template <bool kVisitNativeRoots = true,
+ VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
typename Visitor,
typename JavaLangRefVisitor = VoidFunctor>
void VisitReferences(const Visitor& visitor, const JavaLangRefVisitor& ref_visitor)
@@ -495,6 +508,11 @@
SHARED_REQUIRES(Locks::mutator_lock_) {
return GetFieldPtrWithSize<T, kVerifyFlags, kIsVolatile>(field_offset, sizeof(void*));
}
+ template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
+ T GetFieldPtr64(MemberOffset field_offset)
+ SHARED_REQUIRES(Locks::mutator_lock_) {
+ return GetFieldPtrWithSize<T, kVerifyFlags, kIsVolatile>(field_offset, 8u);
+ }
template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
ALWAYS_INLINE T GetFieldPtrWithSize(MemberOffset field_offset, size_t pointer_size)
@@ -511,13 +529,20 @@
}
// TODO: Fixme when annotalysis works with visitors.
- template<bool kIsStatic, typename Visitor>
+ template<bool kIsStatic,
+ VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
+ typename Visitor>
void VisitFieldsReferences(uint32_t ref_offsets, const Visitor& visitor) HOT_ATTR
NO_THREAD_SAFETY_ANALYSIS;
- template<typename Visitor>
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
+ typename Visitor>
void VisitInstanceFieldsReferences(mirror::Class* klass, const Visitor& visitor) HOT_ATTR
SHARED_REQUIRES(Locks::mutator_lock_);
- template<typename Visitor>
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
+ typename Visitor>
void VisitStaticFieldsReferences(mirror::Class* klass, const Visitor& visitor) HOT_ATTR
SHARED_REQUIRES(Locks::mutator_lock_);
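
SetFieldPtr64/GetFieldPtr64 access a pointer through a fixed 8-byte slot, independent of the runtime's native pointer size. A self-contained sketch of that widening and narrowing, with hypothetical names (FakeField), assuming the stored value fits in a native pointer:

// Self-contained sketch: writing and reading a native pointer through a fixed
// 8-byte field, independent of sizeof(void*). FakeField and the helpers are
// hypothetical names, not ART API.
#include <cassert>
#include <cstdint>
#include <cstdio>
#include <cstring>

struct FakeField {
  uint8_t storage[8];  // Always 8 bytes, even on a 32-bit build.
};

template <typename T>
void SetFieldPtr64(FakeField* f, T* value) {
  uint64_t widened = reinterpret_cast<uintptr_t>(value);  // Zero-extends on 32-bit hosts.
  std::memcpy(f->storage, &widened, sizeof(widened));
}

template <typename T>
T* GetFieldPtr64(const FakeField* f) {
  uint64_t widened;
  std::memcpy(&widened, f->storage, sizeof(widened));
  return reinterpret_cast<T*>(static_cast<uintptr_t>(widened));
}

int main() {
  int x = 42;
  FakeField f;
  SetFieldPtr64(&f, &x);
  assert(GetFieldPtr64<int>(&f) == &x);
  std::printf("round-tripped %d through an 8-byte field\n", *GetFieldPtr64<int>(&f));
  return 0;
}
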
diff --git a/runtime/mirror/object_array-inl.h b/runtime/mirror/object_array-inl.h
index 5337760..6f9d642 100644
--- a/runtime/mirror/object_array-inl.h
+++ b/runtime/mirror/object_array-inl.h
@@ -55,13 +55,13 @@
Runtime::Current()->GetHeap()->GetCurrentAllocator());
}
-template<class T>
+template<class T> template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline T* ObjectArray<T>::Get(int32_t i) {
if (!CheckIsValidIndex(i)) {
DCHECK(Thread::Current()->IsExceptionPending());
return nullptr;
}
- return GetFieldObject<T>(OffsetOfElement(i));
+ return GetFieldObject<T, kVerifyFlags, kReadBarrierOption>(OffsetOfElement(i));
}
template<class T> template<VerifyObjectFlags kVerifyFlags>
diff --git a/runtime/mirror/object_array.h b/runtime/mirror/object_array.h
index b45cafd..1b1295c 100644
--- a/runtime/mirror/object_array.h
+++ b/runtime/mirror/object_array.h
@@ -37,7 +37,9 @@
static ObjectArray<T>* Alloc(Thread* self, Class* object_array_class, int32_t length)
SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!Roles::uninterruptible_);
- T* Get(int32_t i) ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_);
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
+ ALWAYS_INLINE T* Get(int32_t i) SHARED_REQUIRES(Locks::mutator_lock_);
// Returns true if the object can be stored into the array. If not, throws
// an ArrayStoreException and returns false.
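
Callers that must not trigger read barriers, such as image-relocation visitors, can now say so at the Get call site. A sketch of such a call, assuming the ART headers and a held mutator lock; SumNonNull is an illustrative helper, not part of this change:

// Sketch only: assumes "mirror/object_array-inl.h" is included and the mutator
// lock is held. SumNonNull is an illustrative helper, not part of this change.
namespace art {

static size_t SumNonNull(mirror::ObjectArray<mirror::Object>* array)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  size_t count = 0;
  for (int32_t i = 0; i < array->GetLength(); ++i) {
    // Explicitly skip verification and the read barrier, as relocation code does.
    if (array->Get<kVerifyNone, kWithoutReadBarrier>(i) != nullptr) {
      ++count;
    }
  }
  return count;
}

}  // namespace art
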
diff --git a/runtime/native/dalvik_system_DexFile.cc b/runtime/native/dalvik_system_DexFile.cc
index da6cf1f..6643ac2 100644
--- a/runtime/native/dalvik_system_DexFile.cc
+++ b/runtime/native/dalvik_system_DexFile.cc
@@ -154,10 +154,8 @@
jstring javaSourceName,
jstring javaOutputName,
jint flags ATTRIBUTE_UNUSED,
- // class_loader will be used for app images.
- jobject class_loader ATTRIBUTE_UNUSED,
- // dex_elements will be used for app images.
- jobject dex_elements ATTRIBUTE_UNUSED) {
+ jobject class_loader,
+ jobjectArray dex_elements) {
ScopedUtfChars sourceName(env, javaSourceName);
if (sourceName.c_str() == nullptr) {
return 0;
@@ -174,6 +172,8 @@
dex_files = runtime->GetOatFileManager().OpenDexFilesFromOat(sourceName.c_str(),
outputName.c_str(),
+ class_loader,
+ dex_elements,
/*out*/ &oat_file,
/*out*/ &error_msgs);
diff --git a/runtime/oat_file_assistant.cc b/runtime/oat_file_assistant.cc
index d6b0868..2bd5c76 100644
--- a/runtime/oat_file_assistant.cc
+++ b/runtime/oat_file_assistant.cc
@@ -38,6 +38,7 @@
#include "os.h"
#include "profiler.h"
#include "runtime.h"
+#include "scoped_thread_state_change.h"
#include "ScopedFd.h"
#include "utils.h"
@@ -326,6 +327,17 @@
return cached_odex_file_is_up_to_date_;
}
+std::string OatFileAssistant::ArtFileName(const OatFile* oat_file) const {
+ const std::string oat_file_location = oat_file->GetLocation();
+ // Replace the extension with .art.
+ const size_t last_ext = oat_file_location.find_last_of('.');
+ if (last_ext == std::string::npos) {
+ LOG(ERROR) << "No extension in oat file " << oat_file_location;
+ return std::string();
+ }
+ return oat_file_location.substr(0, last_ext) + ".art";
+}
+
const std::string* OatFileAssistant::OatFileName() {
if (!cached_oat_file_name_attempted_) {
cached_oat_file_name_attempted_ = true;
@@ -1003,5 +1015,22 @@
return old_profile_load_succeeded_ ? &cached_old_profile_ : nullptr;
}
+gc::space::ImageSpace* OatFileAssistant::OpenImageSpace(const OatFile* oat_file) {
+ DCHECK(oat_file != nullptr);
+ std::string art_file = ArtFileName(oat_file);
+ if (art_file.empty()) {
+ return nullptr;
+ }
+ std::string error_msg;
+ ScopedObjectAccess soa(Thread::Current());
+ gc::space::ImageSpace* ret = gc::space::ImageSpace::CreateFromAppImage(art_file.c_str(),
+ oat_file,
+ &error_msg);
+ if (ret == nullptr) {
+ LOG(INFO) << "Failed to open app image " << art_file.c_str() << " " << error_msg;
+ }
+ return ret;
+}
+
} // namespace art
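
The app image path is derived from the oat path purely by swapping the extension, as ArtFileName does above. A self-contained illustration of that mapping; ToArtFileName and the example paths are only for the example:

// Self-contained illustration of the oat-path -> app-image-path mapping done by
// OatFileAssistant::ArtFileName. ToArtFileName and the paths are examples only.
#include <cassert>
#include <cstddef>
#include <string>

static std::string ToArtFileName(const std::string& oat_file_location) {
  const size_t last_ext = oat_file_location.find_last_of('.');
  if (last_ext == std::string::npos) {
    return std::string();  // No extension: mirrors the LOG(ERROR) + empty-string case.
  }
  return oat_file_location.substr(0, last_ext) + ".art";
}

int main() {
  assert(ToArtFileName("/data/app/com.example-1/oat/arm64/base.odex") ==
         "/data/app/com.example-1/oat/arm64/base.art");
  assert(ToArtFileName("no_extension").empty());
  return 0;
}
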
diff --git a/runtime/oat_file_assistant.h b/runtime/oat_file_assistant.h
index f781532..7b45bca 100644
--- a/runtime/oat_file_assistant.h
+++ b/runtime/oat_file_assistant.h
@@ -30,6 +30,12 @@
namespace art {
+namespace gc {
+namespace space {
+class ImageSpace;
+} // namespace space
+} // namespace gc
+
// Class for assisting with oat file management.
//
// This class collects common utilities for determining the status of an oat
@@ -163,6 +169,9 @@
// the OatFileAssistant object.
std::unique_ptr<OatFile> GetBestOatFile();
+ // Opens and returns an image space associated with the oat file.
+ gc::space::ImageSpace* OpenImageSpace(const OatFile* oat_file);
+
// Loads the dex files in the given oat file for the given dex location.
// The oat file should be up to date for the given dex location.
// This loads multiple dex files in the case of multidex.
@@ -214,6 +223,9 @@
bool OatFileNeedsRelocation();
bool OatFileIsUpToDate();
+ // Returns the image file name. Not cached, since it depends on the oat file.
+ std::string ArtFileName(const OatFile* oat_file) const;
+
// These methods return the status for a given opened oat file with respect
// to the dex location.
OatStatus GivenOatFileStatus(const OatFile& file);
diff --git a/runtime/oat_file_assistant_test.cc b/runtime/oat_file_assistant_test.cc
index f994f0c..25dcbe4 100644
--- a/runtime/oat_file_assistant_test.cc
+++ b/runtime/oat_file_assistant_test.cc
@@ -996,6 +996,8 @@
dex_files = Runtime::Current()->GetOatFileManager().OpenDexFilesFromOat(
dex_location_.c_str(),
oat_location_.c_str(),
+ /*class_loader*/nullptr,
+ /*dex_elements*/nullptr,
&oat_file,
&error_msgs);
CHECK(!dex_files.empty()) << Join(error_msgs, '\n');
diff --git a/runtime/oat_file_manager.cc b/runtime/oat_file_manager.cc
index 7f216f9..b34b550 100644
--- a/runtime/oat_file_manager.cc
+++ b/runtime/oat_file_manager.cc
@@ -22,9 +22,13 @@
#include "base/logging.h"
#include "base/stl_util.h"
+#include "class_linker.h"
#include "dex_file-inl.h"
#include "gc/space/image_space.h"
+#include "handle_scope-inl.h"
+#include "mirror/class_loader.h"
#include "oat_file_assistant.h"
+#include "scoped_thread_state_change.h"
#include "thread-inl.h"
namespace art {
@@ -34,6 +38,9 @@
// normal builds.
static constexpr bool kDuplicateClassesCheck = kIsDebugBuild;
+// If true, then we attempt to load the application image if it exists.
+static constexpr bool kEnableAppImage = true;
+
const OatFile* OatFileManager::RegisterOatFile(std::unique_ptr<const OatFile> oat_file) {
WriterMutexLock mu(Thread::Current(), *Locks::oat_file_manager_lock_);
DCHECK(oat_file != nullptr);
@@ -284,6 +291,8 @@
std::vector<std::unique_ptr<const DexFile>> OatFileManager::OpenDexFilesFromOat(
const char* dex_location,
const char* oat_location,
+ jobject class_loader,
+ jobjectArray dex_elements,
const OatFile** out_oat_file,
std::vector<std::string>* error_msgs) {
CHECK(dex_location != nullptr);
@@ -291,12 +300,13 @@
// Verify we aren't holding the mutator lock, which could starve GC if we
// have to generate or relocate an oat file.
- Locks::mutator_lock_->AssertNotHeld(Thread::Current());
-
+ Thread* const self = Thread::Current();
+ Locks::mutator_lock_->AssertNotHeld(self);
+ Runtime* const runtime = Runtime::Current();
OatFileAssistant oat_file_assistant(dex_location,
oat_location,
kRuntimeISA,
- !Runtime::Current()->IsAotCompiler());
+ !runtime->IsAotCompiler());
// Lock the target oat location to avoid races generating and loading the
// oat file.
@@ -317,6 +327,7 @@
// Get the oat file on disk.
std::unique_ptr<const OatFile> oat_file(oat_file_assistant.GetBestOatFile().release());
+
if (oat_file != nullptr) {
// Take the file only if it has no collisions, or we must take it because of preopting.
bool accept_oat_file = !HasCollisions(oat_file.get(), /*out*/ &error_msg);
@@ -351,7 +362,42 @@
// Load the dex files from the oat file.
if (source_oat_file != nullptr) {
- dex_files = oat_file_assistant.LoadDexFiles(*source_oat_file, dex_location);
+ bool added_image_space = false;
+ if (source_oat_file->IsExecutable()) {
+ std::unique_ptr<gc::space::ImageSpace> image_space(
+ kEnableAppImage ? oat_file_assistant.OpenImageSpace(source_oat_file) : nullptr);
+ if (image_space != nullptr) {
+ ScopedObjectAccess soa(self);
+ StackHandleScope<1> hs(self);
+ Handle<mirror::ClassLoader> h_loader(
+ hs.NewHandle(soa.Decode<mirror::ClassLoader*>(class_loader)));
+ // Cannot load an app image without a class loader.
+ if (h_loader.Get() != nullptr) {
+ std::string temp_error_msg;
+ // Adding the image space has a race condition, since other threads could be reading
+ // from the spaces array.
+ runtime->GetHeap()->AddSpace(image_space.get());
+ added_image_space = true;
+ if (!runtime->GetClassLinker()->AddImageSpace(image_space.get(),
+ h_loader,
+ dex_elements,
+ dex_location,
+ /*out*/&dex_files,
+ /*out*/&temp_error_msg)) {
+ LOG(INFO) << "Failed to add image file " << temp_error_msg;
+ dex_files.clear();
+ runtime->GetHeap()->RemoveSpace(image_space.get());
+ added_image_space = false;
+ // Non-fatal, don't update error_msg.
+ }
+ image_space.release();
+ }
+ }
+ }
+ if (!added_image_space) {
+ DCHECK(dex_files.empty());
+ dex_files = oat_file_assistant.LoadDexFiles(*source_oat_file, dex_location);
+ }
if (dex_files.empty()) {
error_msgs->push_back("Failed to open dex files from " + source_oat_file->GetLocation());
}
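
The flow added to OpenDexFilesFromOat is: try the app image first, roll the heap-space registration back if the class linker rejects it, and fall back to ordinary dex loading either way. Below is a self-contained caricature of that control flow; all types and helpers are stand-ins, and ownership on the failure path is simplified relative to the real code (which releases the space unconditionally inside the loader branch):

// Self-contained caricature of the load-with-rollback flow in OpenDexFilesFromOat.
// Heap, ImageSpace, TryAddImageSpace and LoadDexFilesSlowPath are stand-ins.
#include <algorithm>
#include <cstdio>
#include <memory>
#include <vector>

struct ImageSpace {};

struct Heap {
  std::vector<ImageSpace*> spaces;
  void AddSpace(ImageSpace* s) { spaces.push_back(s); }
  void RemoveSpace(ImageSpace* s) {
    spaces.erase(std::find(spaces.begin(), spaces.end(), s));
  }
};

// Stand-in for ClassLinker::AddImageSpace; flip the result to exercise the rollback.
static bool TryAddImageSpace(ImageSpace*) { return false; }

// Stand-in for OatFileAssistant::LoadDexFiles.
static std::vector<int> LoadDexFilesSlowPath() { return {1, 2, 3}; }

static std::vector<int> OpenDexFiles(Heap* heap, std::unique_ptr<ImageSpace> image_space) {
  std::vector<int> dex_files;
  bool added_image_space = false;
  if (image_space != nullptr) {
    heap->AddSpace(image_space.get());       // Space becomes visible to other threads here.
    added_image_space = true;
    if (!TryAddImageSpace(image_space.get())) {
      dex_files.clear();
      heap->RemoveSpace(image_space.get());  // Roll back; non-fatal.
      added_image_space = false;
    } else {
      image_space.release();                 // On success the heap keeps the space alive.
    }
  }
  if (!added_image_space) {
    dex_files = LoadDexFilesSlowPath();      // Ordinary dex loading from the oat file.
  }
  return dex_files;
}

int main() {
  Heap heap;
  std::vector<int> dex_files =
      OpenDexFiles(&heap, std::unique_ptr<ImageSpace>(new ImageSpace));
  std::printf("dex files: %zu, image spaces: %zu\n", dex_files.size(), heap.spaces.size());
  return 0;
}
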
diff --git a/runtime/oat_file_manager.h b/runtime/oat_file_manager.h
index 4690e45..c508c4b 100644
--- a/runtime/oat_file_manager.h
+++ b/runtime/oat_file_manager.h
@@ -25,6 +25,7 @@
#include "base/macros.h"
#include "base/mutex.h"
+#include "jni.h"
namespace art {
@@ -101,6 +102,8 @@
std::vector<std::unique_ptr<const DexFile>> OpenDexFilesFromOat(
const char* dex_location,
const char* oat_location,
+ jobject class_loader,
+ jobjectArray dex_elements,
/*out*/ const OatFile** out_oat_file,
/*out*/ std::vector<std::string>* error_msgs)
REQUIRES(!Locks::oat_file_manager_lock_, !Locks::mutator_lock_);
diff --git a/runtime/runtime.cc b/runtime/runtime.cc
index e30c26d..f138c81 100644
--- a/runtime/runtime.cc
+++ b/runtime/runtime.cc
@@ -1097,7 +1097,7 @@
if (GetHeap()->HasBootImageSpace()) {
ATRACE_BEGIN("InitFromImage");
std::string error_msg;
- bool result = class_linker_->InitFromImage(&error_msg);
+ bool result = class_linker_->InitFromBootImage(&error_msg);
ATRACE_END();
if (!result) {
LOG(ERROR) << "Could not initialize from image: " << error_msg;
diff --git a/runtime/runtime.h b/runtime/runtime.h
index 20acffb..c8c2ee5 100644
--- a/runtime/runtime.h
+++ b/runtime/runtime.h
@@ -366,7 +366,7 @@
SHARED_REQUIRES(Locks::mutator_lock_);
// Returns a special method that calls into a trampoline for runtime method resolution
- ArtMethod* GetResolutionMethod() SHARED_REQUIRES(Locks::mutator_lock_);
+ ArtMethod* GetResolutionMethod();
bool HasResolutionMethod() const {
return resolution_method_ != nullptr;
@@ -377,8 +377,8 @@
ArtMethod* CreateResolutionMethod() SHARED_REQUIRES(Locks::mutator_lock_);
// Returns a special method that calls into a trampoline for runtime imt conflicts.
- ArtMethod* GetImtConflictMethod() SHARED_REQUIRES(Locks::mutator_lock_);
- ArtMethod* GetImtUnimplementedMethod() SHARED_REQUIRES(Locks::mutator_lock_);
+ ArtMethod* GetImtConflictMethod();
+ ArtMethod* GetImtUnimplementedMethod();
bool HasImtConflictMethod() const {
return imt_conflict_method_ != nullptr;
diff --git a/runtime/thread_list.cc b/runtime/thread_list.cc
index 727ffe5..c8714a6 100644
--- a/runtime/thread_list.cc
+++ b/runtime/thread_list.cc
@@ -1191,6 +1191,7 @@
}
LOG(WARNING) << "timed out suspending all daemon threads";
}
+
void ThreadList::Register(Thread* self) {
DCHECK_EQ(self, Thread::Current());
diff --git a/runtime/utils/dex_cache_arrays_layout-inl.h b/runtime/utils/dex_cache_arrays_layout-inl.h
index 90e24b9..f6ee6a2 100644
--- a/runtime/utils/dex_cache_arrays_layout-inl.h
+++ b/runtime/utils/dex_cache_arrays_layout-inl.h
@@ -60,7 +60,9 @@
}
inline size_t DexCacheArraysLayout::TypesSize(size_t num_elements) const {
- return ArraySize(sizeof(GcRoot<mirror::Class>), num_elements);
+ // App image patching relies on having enough room for a forwarding pointer in the types array.
+ // See FixupArtMethodArrayVisitor and ClassLinker::AddImageSpace.
+ return std::max(ArraySize(sizeof(GcRoot<mirror::Class>), num_elements), pointer_size_);
}
inline size_t DexCacheArraysLayout::TypesAlignment() const {
@@ -72,7 +74,8 @@
}
inline size_t DexCacheArraysLayout::MethodsSize(size_t num_elements) const {
- return ArraySize(pointer_size_, num_elements);
+ // App image patching relies on having enough room for a forwarding pointer in the methods array.
+ return std::max(ArraySize(pointer_size_, num_elements), pointer_size_);
}
inline size_t DexCacheArraysLayout::MethodsAlignment() const {
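
The std::max clamp guarantees that even an empty types or methods array occupies at least one pointer's worth of space, so image patching can always park a forwarding pointer there. A self-contained numeric illustration, with ArraySize simplified to element_size * num_elements (the real helper also deals with alignment):

// Self-contained illustration of the minimum-size clamp in TypesSize/MethodsSize.
#include <algorithm>
#include <cstddef>
#include <cstdio>

static size_t MinClampedArraySize(size_t element_size, size_t num_elements, size_t pointer_size) {
  // Always leave room for at least one forwarding pointer, even for an empty array.
  return std::max(element_size * num_elements, pointer_size);
}

int main() {
  // Example values only: 4-byte elements and 8-byte pointers. In the real code the
  // element size is sizeof(GcRoot<mirror::Class>) or the method pointer size.
  std::printf("0 elements  -> %zu bytes\n", MinClampedArraySize(4u, 0u, 8u));   // Clamped to 8.
  std::printf("16 elements -> %zu bytes\n", MinClampedArraySize(4u, 16u, 8u));  // 64.
  return 0;
}
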
diff --git a/runtime/well_known_classes.cc b/runtime/well_known_classes.cc
index 8300921..cfa8329 100644
--- a/runtime/well_known_classes.cc
+++ b/runtime/well_known_classes.cc
@@ -129,6 +129,7 @@
jmethodID WellKnownClasses::org_apache_harmony_dalvik_ddmc_DdmServer_dispatch;
jfieldID WellKnownClasses::dalvik_system_DexFile_cookie;
+jfieldID WellKnownClasses::dalvik_system_DexFile_fileName;
jfieldID WellKnownClasses::dalvik_system_PathClassLoader_pathList;
jfieldID WellKnownClasses::dalvik_system_DexPathList_dexElements;
jfieldID WellKnownClasses::dalvik_system_DexPathList__Element_dexFile;
@@ -333,6 +334,7 @@
true, "newStringFromStringBuilder", "(Ljava/lang/StringBuilder;)Ljava/lang/String;");
dalvik_system_DexFile_cookie = CacheField(env, dalvik_system_DexFile, false, "mCookie", "Ljava/lang/Object;");
+ dalvik_system_DexFile_fileName = CacheField(env, dalvik_system_DexFile, false, "mFileName", "Ljava/lang/String;");
dalvik_system_PathClassLoader_pathList = CacheField(env, dalvik_system_PathClassLoader, false, "pathList", "Ldalvik/system/DexPathList;");
dalvik_system_DexPathList_dexElements = CacheField(env, dalvik_system_DexPathList, false, "dexElements", "[Ldalvik/system/DexPathList$Element;");
dalvik_system_DexPathList__Element_dexFile = CacheField(env, dalvik_system_DexPathList__Element, false, "dexFile", "Ldalvik/system/DexFile;");
diff --git a/runtime/well_known_classes.h b/runtime/well_known_classes.h
index 55158a7..482ff0a 100644
--- a/runtime/well_known_classes.h
+++ b/runtime/well_known_classes.h
@@ -140,6 +140,7 @@
static jmethodID org_apache_harmony_dalvik_ddmc_DdmServer_dispatch;
static jfieldID dalvik_system_DexFile_cookie;
+ static jfieldID dalvik_system_DexFile_fileName;
static jfieldID dalvik_system_DexPathList_dexElements;
static jfieldID dalvik_system_DexPathList__Element_dexFile;
static jfieldID dalvik_system_PathClassLoader_pathList;
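
The newly cached mFileName field id is read with plain JNI. A sketch of such a read, assuming <jni.h>, well_known_classes.h, and a valid dalvik.system.DexFile instance; GetDexFileName is an illustrative helper:

// Sketch only: reading DexFile.mFileName through the newly cached field id.
// GetDexFileName is an illustrative helper, not part of this change.
#include <jni.h>
#include <string>
#include "well_known_classes.h"

static std::string GetDexFileName(JNIEnv* env, jobject java_dex_file) {
  jstring name = reinterpret_cast<jstring>(
      env->GetObjectField(java_dex_file,
                          art::WellKnownClasses::dalvik_system_DexFile_fileName));
  if (name == nullptr) {
    return std::string();
  }
  const char* chars = env->GetStringUTFChars(name, nullptr);
  if (chars == nullptr) {
    return std::string();  // OOM while fetching the characters.
  }
  std::string result(chars);
  env->ReleaseStringUTFChars(name, chars);
  return result;
}
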